From a23e3d8a389bff5c1b8e4b7d90bd184535b94799 Mon Sep 17 00:00:00 2001 From: Simon Dumas Date: Tue, 17 Sep 2024 13:47:42 +0200 Subject: [PATCH] Remove storage server implementation --- .../workflows/ci-delta-static-analysis.yml | 1 - .github/workflows/ci-integration-tests.yml | 6 +- .github/workflows/ci-release-docker.yml | 1 - .github/workflows/ci-snapshot.yml | 2 - .github/workflows/ci-storage.yml | 45 -- build.sbt | 99 +-- .../RemoteStorageClientFixtures.scala | 2 +- storage/permissions-fixer/Cargo.toml | 15 - storage/permissions-fixer/Dockerfile | 11 - storage/permissions-fixer/README.md | 41 -- storage/permissions-fixer/build.rs | 5 - storage/permissions-fixer/src/config.rs | 56 -- storage/permissions-fixer/src/errors.rs | 55 -- storage/permissions-fixer/src/main.rs | 41 -- storage/permissions-fixer/src/path.rs | 266 -------- .../tests/unit-tests-with-env.sh | 21 - storage/src/main/resources/akka.conf | 26 - storage/src/main/resources/app.conf | 118 ---- storage/src/main/resources/application.conf | 3 - storage/src/main/resources/kamon.conf | 68 -- storage/src/main/resources/logback.xml | 34 - .../epfl/bluebrain/nexus/storage/File.scala | 75 --- .../nexus/storage/JsonLdCirceSupport.scala | 100 --- .../epfl/bluebrain/nexus/storage/Main.scala | 112 ---- .../bluebrain/nexus/storage/MediaTypes.scala | 9 - .../bluebrain/nexus/storage/Rejection.scala | 81 --- .../nexus/storage/StorageError.scala | 110 ---- .../bluebrain/nexus/storage/Storages.scala | 340 ---------- .../nexus/storage/StringProcessLogger.scala | 51 -- .../bluebrain/nexus/storage/TarFlow.scala | 76 --- .../bluebrain/nexus/storage/UriUtils.scala | 25 - .../storage/attributes/AttributesCache.scala | 80 --- .../attributes/AttributesCacheActor.scala | 158 ----- .../attributes/AttributesComputation.scala | 80 --- .../attributes/ContentTypeDetector.scala | 45 -- .../storage/auth/AuthorizationError.scala | 20 - .../storage/auth/AuthorizationMethod.scala | 67 -- .../nexus/storage/config/AppConfig.scala | 136 ---- .../nexus/storage/config/Contexts.scala | 12 - .../nexus/storage/config/Settings.scala | 35 - .../nexus/storage/files/CopyFileOutput.scala | 19 - .../nexus/storage/files/ValidateFile.scala | 175 ----- .../nexus/storage/jsonld/JsonLdContext.scala | 37 -- .../bluebrain/nexus/storage/package.scala | 99 --- .../nexus/storage/routes/AppInfoRoutes.scala | 50 -- .../nexus/storage/routes/AuthDirectives.scala | 35 - .../nexus/storage/routes/CopyFile.scala | 11 - .../storage/routes/PrefixDirectives.scala | 40 -- .../storage/routes/RejectionHandling.scala | 230 ------- .../nexus/storage/routes/Routes.scala | 99 --- .../nexus/storage/routes/StatusFrom.scala | 37 -- .../storage/routes/StorageDirectives.scala | 146 ----- .../nexus/storage/routes/StorageRoutes.scala | 150 ----- .../nexus/storage/routes/instances.scala | 129 ---- storage/src/test/resources/app-info.json | 4 - storage/src/test/resources/app.conf | 7 - .../resources/content-type/file-example.json | 3 - .../test/resources/content-type/file.custom | 1 - .../test/resources/content-type/no-extension | 1 - storage/src/test/resources/error.json | 5 - storage/src/test/resources/file-created.json | 10 - storage/src/test/resources/file-link.json | 3 - .../nexus/storage/DiskStorageSpec.scala | 537 --------------- .../storage/StringProcessLoggerSpec.scala | 27 - .../bluebrain/nexus/storage/TarFlowSpec.scala | 82 --- .../attributes/AttributesCacheSpec.scala | 161 ----- .../AttributesComputationSpec.scala | 49 -- .../attributes/ContentTypeDetectorSuite.scala | 39 -- 
.../auth/AuthorizationMethodSuite.scala | 137 ---- .../storage/routes/AppInfoRoutesSpec.scala | 33 - .../storage/routes/AuthDirectivesSpec.scala | 107 --- .../routes/StorageDirectivesSpec.scala | 83 --- .../storage/routes/StorageRoutesSpec.scala | 611 ------------------ .../nexus/storage/utils/Randomness.scala | 11 - .../nexus/storage/utils/Resources.scala | 68 -- tests/docker/docker-compose.yml | 2 +- 76 files changed, 6 insertions(+), 5760 deletions(-) delete mode 100644 .github/workflows/ci-storage.yml delete mode 100644 storage/permissions-fixer/Cargo.toml delete mode 100644 storage/permissions-fixer/Dockerfile delete mode 100644 storage/permissions-fixer/README.md delete mode 100644 storage/permissions-fixer/build.rs delete mode 100644 storage/permissions-fixer/src/config.rs delete mode 100644 storage/permissions-fixer/src/errors.rs delete mode 100644 storage/permissions-fixer/src/main.rs delete mode 100644 storage/permissions-fixer/src/path.rs delete mode 100755 storage/permissions-fixer/tests/unit-tests-with-env.sh delete mode 100644 storage/src/main/resources/akka.conf delete mode 100644 storage/src/main/resources/app.conf delete mode 100644 storage/src/main/resources/application.conf delete mode 100644 storage/src/main/resources/kamon.conf delete mode 100644 storage/src/main/resources/logback.xml delete mode 100644 storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/File.scala delete mode 100644 storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/JsonLdCirceSupport.scala delete mode 100644 storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/Main.scala delete mode 100644 storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/MediaTypes.scala delete mode 100644 storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/Rejection.scala delete mode 100644 storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/StorageError.scala delete mode 100644 storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/Storages.scala delete mode 100644 storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/StringProcessLogger.scala delete mode 100644 storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/TarFlow.scala delete mode 100644 storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/UriUtils.scala delete mode 100644 storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/attributes/AttributesCache.scala delete mode 100644 storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/attributes/AttributesCacheActor.scala delete mode 100644 storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/attributes/AttributesComputation.scala delete mode 100644 storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/attributes/ContentTypeDetector.scala delete mode 100644 storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/auth/AuthorizationError.scala delete mode 100644 storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/auth/AuthorizationMethod.scala delete mode 100644 storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/config/AppConfig.scala delete mode 100644 storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/config/Contexts.scala delete mode 100644 storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/config/Settings.scala delete mode 100644 storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/files/CopyFileOutput.scala delete mode 100644 storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/files/ValidateFile.scala delete mode 100644 storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/jsonld/JsonLdContext.scala delete mode 100644 
storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/package.scala delete mode 100644 storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/AppInfoRoutes.scala delete mode 100644 storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/AuthDirectives.scala delete mode 100644 storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/CopyFile.scala delete mode 100644 storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/PrefixDirectives.scala delete mode 100644 storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/RejectionHandling.scala delete mode 100644 storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/Routes.scala delete mode 100644 storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/StatusFrom.scala delete mode 100644 storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/StorageDirectives.scala delete mode 100644 storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/StorageRoutes.scala delete mode 100644 storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/instances.scala delete mode 100644 storage/src/test/resources/app-info.json delete mode 100644 storage/src/test/resources/app.conf delete mode 100644 storage/src/test/resources/content-type/file-example.json delete mode 100644 storage/src/test/resources/content-type/file.custom delete mode 100644 storage/src/test/resources/content-type/no-extension delete mode 100644 storage/src/test/resources/error.json delete mode 100644 storage/src/test/resources/file-created.json delete mode 100644 storage/src/test/resources/file-link.json delete mode 100644 storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/DiskStorageSpec.scala delete mode 100644 storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/StringProcessLoggerSpec.scala delete mode 100644 storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/TarFlowSpec.scala delete mode 100644 storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/attributes/AttributesCacheSpec.scala delete mode 100644 storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/attributes/AttributesComputationSpec.scala delete mode 100644 storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/attributes/ContentTypeDetectorSuite.scala delete mode 100644 storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/auth/AuthorizationMethodSuite.scala delete mode 100644 storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/routes/AppInfoRoutesSpec.scala delete mode 100644 storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/routes/AuthDirectivesSpec.scala delete mode 100644 storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/routes/StorageDirectivesSpec.scala delete mode 100644 storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/routes/StorageRoutesSpec.scala delete mode 100644 storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/utils/Randomness.scala delete mode 100644 storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/utils/Resources.scala diff --git a/.github/workflows/ci-delta-static-analysis.yml b/.github/workflows/ci-delta-static-analysis.yml index 1c0785a7b3..af1182b129 100644 --- a/.github/workflows/ci-delta-static-analysis.yml +++ b/.github/workflows/ci-delta-static-analysis.yml @@ -4,7 +4,6 @@ on: paths: - 'delta/**' - 'ship/**' - - 'storage/**' - 'build.sbt' - 'project/**' - '.github/workflows/ci-delta-static-analysis.yml' diff --git a/.github/workflows/ci-integration-tests.yml b/.github/workflows/ci-integration-tests.yml index 79c2594aa1..ab972b0976 100644 --- a/.github/workflows/ci-integration-tests.yml +++ 
b/.github/workflows/ci-integration-tests.yml @@ -3,7 +3,6 @@ on: pull_request: paths: - 'delta/**' - - 'storage/**' - 'tests/**' - 'build.sbt' - 'project/**' @@ -26,12 +25,11 @@ jobs: java-version: '21' cache: 'sbt' check-latest: true - - name: Clean, build Delta & Storage images + - name: Clean, build Delta image run: | sbt -Dsbt.color=always -Dsbt.supershell=false \ clean \ - app/Docker/publishLocal \ - storage/Docker/publishLocal + app/Docker/publishLocal - name: Start services run: docker compose -f tests/docker/docker-compose.yml up -d - name: Waiting for Delta to start diff --git a/.github/workflows/ci-release-docker.yml b/.github/workflows/ci-release-docker.yml index fd7fba25be..9b7612aab9 100644 --- a/.github/workflows/ci-release-docker.yml +++ b/.github/workflows/ci-release-docker.yml @@ -39,5 +39,4 @@ jobs: echo ${{ secrets.DOCKER_PASS }} | docker login --username ${{ secrets.DOCKER_USER }} --password-stdin sbt -Dsbt.color=always -Dsbt.supershell=false \ app/Docker/publish \ - storage/Docker/publish \ ship/Docker/publish \ No newline at end of file diff --git a/.github/workflows/ci-snapshot.yml b/.github/workflows/ci-snapshot.yml index 29c97aa764..5bee308812 100644 --- a/.github/workflows/ci-snapshot.yml +++ b/.github/workflows/ci-snapshot.yml @@ -6,7 +6,6 @@ on: paths: - 'cli/**' - 'delta/**' - - 'storage/**' - 'build.sbt' - 'project/**' - 'ship/**' @@ -47,5 +46,4 @@ jobs: echo ${{ secrets.DOCKER_PASS }} | docker login --username ${{ secrets.DOCKER_USER }} --password-stdin sbt -Dsbt.color=always -Dsbt.supershell=false \ app/Docker/publish \ - storage/Docker/publish \ ship/Docker/publish diff --git a/.github/workflows/ci-storage.yml b/.github/workflows/ci-storage.yml deleted file mode 100644 index 2b8aae74a9..0000000000 --- a/.github/workflows/ci-storage.yml +++ /dev/null @@ -1,45 +0,0 @@ -name: Storage Integration Service -on: - pull_request: - paths: - - 'storage/**' - - 'build.sbt' - - 'project/**' - - '.github/workflows/ci-storage.yml' -jobs: - run: - if: github.event_name == 'pull_request' - runs-on: ubuntu-latest - timeout-minutes: 20 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - steps: - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - name: Setup JDK - uses: actions/setup-java@v4 - with: - distribution: 'temurin' - java-version: '21' - cache: 'sbt' - check-latest: true - - name: StaticAnalysis - run: sbt -Dsbt.color=always -Dsbt.supershell=false "project storage" clean scalafmtCheck Test/scalafmtCheck scalafmtSbtCheck scapegoat - - name: Tests - run: sbt -Dsbt.color=always -Dsbt.supershell=false "project storage" clean coverage test coverageReport coverageAggregate - - name: Assembly - run: sbt -Dsbt.color=always -Dsbt.supershell=false "project storage" assembly - review-permission-fixer: - if: github.event_name == 'pull_request' - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - name: Build image - run: docker build ./storage/permissions-fixer --tag=nexus/fixer - - name: Compile and run tests - run: docker run nexus/fixer diff --git a/build.sbt b/build.sbt index 4f916e321a..539de2b3da 100755 --- a/build.sbt +++ b/build.sbt @@ -20,8 +20,6 @@ val akkaHttpCirceVersion = "1.39.2" val akkaCorsVersion = "1.2.0" val akkaVersion = "2.6.21" val alpakkaVersion = "3.0.4" -val apacheCompressVersion = "1.27.1" -val apacheIOVersion = "2.16.1" val awsSdkVersion = "2.28.1" val betterMonadicForVersion = "0.3.1" val caffeineVersion = "3.1.8" @@ -75,8 +73,6 @@ lazy val akkaTestKit = 
"com.typesafe.akka" %% "akka lazy val akkaTestKitTyped = "com.typesafe.akka" %% "akka-actor-testkit-typed" % akkaVersion lazy val alpakkaFile = "com.lightbend.akka" %% "akka-stream-alpakka-file" % alpakkaVersion lazy val alpakkaSse = "com.lightbend.akka" %% "akka-stream-alpakka-sse" % alpakkaVersion -lazy val apacheCompress = "org.apache.commons" % "commons-compress" % apacheCompressVersion -lazy val apacheIO = "commons-io" % "commons-io" % apacheIOVersion lazy val awsSdk = "software.amazon.awssdk" % "s3" % awsSdkVersion lazy val betterMonadicFor = "com.olegpy" %% "better-monadic-for" % betterMonadicForVersion lazy val caffeine = "com.github.ben-manes.caffeine" % "caffeine" % caffeineVersion @@ -763,78 +759,6 @@ lazy val ship = project Docker / packageName := "nexus-ship" ) -lazy val cargo = taskKey[(File, String)]("Run Cargo to build 'nexus-fixer'") - -lazy val storage = project - .in(file("storage")) - .enablePlugins(UniversalPlugin, UniversalDeployPlugin, JavaAppPackaging, JavaAgent, DockerPlugin, BuildInfoPlugin) - .settings( - shared, - compilation, - assertJavaVersion, - kamonSettings, - storageAssemblySettings, - coverage, - release, - servicePackaging, - addArtifact(Artifact("delta-storage-app", "application"), assembly), - coverageMinimumStmtTotal := 75 - ) - .dependsOn(kernel, testkit % "test->compile") - .settings(cargo := { - import scala.sys.process._ - - val log = streams.value.log - val cmd = Process(Seq("cargo", "build", "--release"), baseDirectory.value / "permissions-fixer") - if (cmd.! == 0) { - log.success("Cargo build successful.") - (baseDirectory.value / "permissions-fixer" / "target" / "release" / "nexus-fixer") -> "bin/nexus-fixer" - } else { - log.error("Cargo build failed.") - throw new RuntimeException - } - }) - .settings( - name := "storage", - moduleName := "storage", - buildInfoKeys := Seq[BuildInfoKey](version), - buildInfoPackage := "ch.epfl.bluebrain.nexus.storage.config", - Docker / packageName := "nexus-storage", - libraryDependencies ++= Seq( - apacheCompress, - apacheIO, - akkaHttp, - akkaHttpCirce, - akkaStream, - akkaSlf4j, - alpakkaFile, - catsCore, - catsEffect, - circeCore, - circeGenericExtras, - fs2io, - logback, - pureconfig, - akkaHttpTestKit % Test, - akkaTestKit % Test, - mockito % Test, - munit % Test, - munitCatsEffect % Test, - scalaTest % Test - ), - addCompilerPlugin(betterMonadicFor), - cleanFiles ++= Seq( - baseDirectory.value / "permissions-fixer" / "target" / "**", - baseDirectory.value / "nexus-storage.jar" - ), - Test / testOptions += Tests.Argument(TestFrameworks.ScalaTest, "-o", "-u", "target/test-reports"), - Test / parallelExecution := false, - Test / classLoaderLayeringStrategy := ClassLoaderLayeringStrategy.ScalaLibrary, - Universal / mappings := { - (Universal / mappings).value :+ cargo.value - } - ) - lazy val tests = project .in(file("tests")) .dependsOn(testkit) @@ -871,7 +795,7 @@ lazy val root = project .in(file(".")) .settings(name := "nexus", moduleName := "nexus") .settings(compilation, shared, noPublish) - .aggregate(docs, delta, ship, storage, tests) + .aggregate(docs, delta, ship, tests) lazy val noPublish = Seq( publish / skip := true, @@ -920,23 +844,6 @@ lazy val kamonSettings = Seq( javaAgents += kanelaAgent ) -lazy val storageAssemblySettings = Seq( - assembly / assemblyJarName := "nexus-storage.jar", - assembly / test := {}, - assembly / assemblyMergeStrategy := { - case PathList("org", "apache", "commons", "logging", xs @ _*) => MergeStrategy.last - case PathList("org", "apache", "commons", "codec", xs @ 
_*) => MergeStrategy.last - case PathList("akka", "remote", "kamon", xs @ _*) => MergeStrategy.last - case PathList("kamon", "instrumentation", "akka", "remote", xs @ _*) => MergeStrategy.last - case PathList("javax", "annotation", xs @ _*) => MergeStrategy.first - case PathList("META-INF", "okio.kotlin_module") => MergeStrategy.first - case x if x.endsWith("module-info.class") => MergeStrategy.discard - case x => - val oldStrategy = (assembly / assemblyMergeStrategy).value - oldStrategy(x) - } -) - lazy val discardModuleInfoAssemblySettings = Seq( assembly / assemblyMergeStrategy := { case x if x.contains("io.netty.versions.properties") => MergeStrategy.discard @@ -1036,9 +943,7 @@ ThisBuild / licenses := Seq("Apache-2.0" -> url("http://www. ThisBuild / scmInfo := Some(ScmInfo(url("https://github.com/BlueBrain/nexus"), "scm:git:git@github.com:BlueBrain/nexus.git")) ThisBuild / developers := List( Developer("imsdu", "Simon Dumas", "noreply@epfl.ch", url("https://bluebrain.epfl.ch/")), - Developer("olivergrabinski ", "Oliver Grabinski", "noreply@epfl.ch", url("https://bluebrain.epfl.ch/")), - Developer("shinyhappydan", "Daniel Bell", "noreply@epfl.ch", url("https://bluebrain.epfl.ch/")), - Developer("dantb", "Daniel Tattan-Birch", "noreply@epfl.ch", url("https://bluebrain.epfl.ch/")) + Developer("shinyhappydan", "Daniel Bell", "noreply@epfl.ch", url("https://bluebrain.epfl.ch/")) ) ThisBuild / sonatypeCredentialHost := "s01.oss.sonatype.org" ThisBuild / sonatypeRepository := "https://s01.oss.sonatype.org/service/local" diff --git a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/remotestorage/RemoteStorageClientFixtures.scala b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/remotestorage/RemoteStorageClientFixtures.scala index 0becb45de6..3fad5a51db 100644 --- a/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/remotestorage/RemoteStorageClientFixtures.scala +++ b/delta/plugins/storage/src/test/scala/ch/epfl/bluebrain/nexus/delta/plugins/storage/remotestorage/RemoteStorageClientFixtures.scala @@ -21,7 +21,7 @@ trait RemoteStorageClientFixtures extends BeforeAndAfterAll with ConfigFixtures private val rwx = PosixFilePermissions.asFileAttribute(PosixFilePermissions.fromString("rwxrwxrwx")) private val tmpFolder: Path = Files.createTempDirectory("root", rwx) - val storageVersion: String = "1.9.0" + val storageVersion: String = "1.10.0" protected val container: RemoteStorageContainer = new RemoteStorageContainer(storageVersion, tmpFolder) diff --git a/storage/permissions-fixer/Cargo.toml b/storage/permissions-fixer/Cargo.toml deleted file mode 100644 index b09b04b63a..0000000000 --- a/storage/permissions-fixer/Cargo.toml +++ /dev/null @@ -1,15 +0,0 @@ -[package] -name = "nexus-fixer" -version = "1.0.0" -authors = ["Henry Genet "] -edition = "2018" -build = "build.rs" - -[dependencies] -clap = "2.33.0" -libc = "0.2.60" -rand = "0.7" -walkdir = "2.2.9" - -[build-dependencies] -built = "0.3.1" \ No newline at end of file diff --git a/storage/permissions-fixer/Dockerfile b/storage/permissions-fixer/Dockerfile deleted file mode 100644 index 6465091d9d..0000000000 --- a/storage/permissions-fixer/Dockerfile +++ /dev/null @@ -1,11 +0,0 @@ -FROM rust:latest - -ENV NEXUS_PATH_PREFIX /tmp/nexus-fixer - -RUN apt-get update -yqq && apt-get install -yqq --no-install-recommends build-essential - -RUN mkdir /opt/src -WORKDIR /opt/src -COPY . 
/opt/src - -CMD ["cargo", "test"] \ No newline at end of file diff --git a/storage/permissions-fixer/README.md b/storage/permissions-fixer/README.md deleted file mode 100644 index 497f4503a3..0000000000 --- a/storage/permissions-fixer/README.md +++ /dev/null @@ -1,41 +0,0 @@ -## Nexus storage permissions fixer - -### Setup - -Use [rustup](https://rustup.rs). - -### Build with custom configuration - -```bash -NEXUS_PATH_PREFIX=/path/to/gpfs/ NEXUS_USER_ID=12345 NEXUS_GROUP_ID=67890 cargo build --release -``` - -### Run tests in Docker - -```bash -docker build . --tag=nexus/fixer -docker run -it nexus/fixer -``` - -### Usage - -#### Apply permissions - -```bash -nexus-fixer PATH -``` - -Where `PATH` is an absolute and valid path to a file or directory. -If `PATH` points to a directory, permissions are applied recursively on the directory and all its children. - -#### Show compile-time configuration - -```bash -nexus-fixer -s -``` - -#### Show help - -```bash -nexus-fixer -h -``` diff --git a/storage/permissions-fixer/build.rs b/storage/permissions-fixer/build.rs deleted file mode 100644 index d3c03817df..0000000000 --- a/storage/permissions-fixer/build.rs +++ /dev/null @@ -1,5 +0,0 @@ -use built::write_built_file; - -fn main() { - write_built_file().expect("Failed to acquire build-time information"); -} diff --git a/storage/permissions-fixer/src/config.rs b/storage/permissions-fixer/src/config.rs deleted file mode 100644 index 03e4f641d3..0000000000 --- a/storage/permissions-fixer/src/config.rs +++ /dev/null @@ -1,56 +0,0 @@ -use libc::{gid_t, uid_t}; - -use crate::errors::Failure; - use crate::errors::Failure::{InvalidGroupId, InvalidUserId}; - -#[allow(dead_code)] -mod built_info { - include!(concat!(env!("OUT_DIR"), "/built.rs")); -} - -const PATH_PREFIX: Option<&'static str> = option_env!("NEXUS_PATH_PREFIX"); -const PATH_PREFIX_DEFAULT: &'static str = "/tmp/"; - -const UID: Option<&'static str> = option_env!("NEXUS_USER_ID"); -const UID_DEFAULT: uid_t = 1000; - -const GID: Option<&'static str> = option_env!("NEXUS_GROUP_ID"); -const GID_DEFAULT: gid_t = 1000; - -pub const FILE_MASK: u32 = 0o440; -pub const CHMOD_MASK_WX_GROUP: u32 = 0b000011000; -pub const DIR_MASK: u32 = 0o750; - -pub fn get_path_prefix() -> &'static str { - PATH_PREFIX.unwrap_or(PATH_PREFIX_DEFAULT) -} - -pub fn get_uid() -> Result<uid_t, Failure> { - match UID { - Some(uid) => uid.parse::<uid_t>().map_err(|_| InvalidUserId), - None => Ok(UID_DEFAULT), - } -} - -pub fn get_gid() -> Result<gid_t, Failure> { - match GID { - Some(gid) => gid.parse::<gid_t>().map_err(|_| InvalidGroupId), - None => Ok(GID_DEFAULT), - } -} - -pub fn show_config() -> Result<(), Failure> { - println!("Package version: {}", built_info::PKG_VERSION); - println!( - "Git commit: {}", - built_info::GIT_VERSION.unwrap_or("Unknown") - ); - println!("Rust version: {}", built_info::RUSTC_VERSION); - println!("Platform: {}", built_info::TARGET); - println!("Path prefix: {}", get_path_prefix()); - println!("UID: {}", get_uid()?); - println!("GID: {}", get_gid()?); - println!("File permissions mask: {:o}", FILE_MASK); - println!("Directory permissions mask: {:o}", DIR_MASK); - Ok(()) -} diff --git a/storage/permissions-fixer/src/errors.rs b/storage/permissions-fixer/src/errors.rs deleted file mode 100644 index 72a45cb6d2..0000000000 --- a/storage/permissions-fixer/src/errors.rs +++ /dev/null @@ -1,55 +0,0 @@ -use std::error::Error; -use std::fmt; -use std::fmt::{Debug, Formatter}; -use std::io; - -use walkdir; - -use Failure::*; - -#[derive(Eq, PartialEq)] -pub enum Failure { - PathCannotBeEmpty, -
PathCannotContainLinks, - PathCannotHaveNull, - PathMustBeAbsolute, - PathMustBeCanonical, - PathMustStartWithPrefix, - FileNotFound, - IOError(String), - InvalidUserId, - InvalidGroupId, - ChmodFailed, - ChownFailed, -} - -impl From<io::Error> for Failure { - fn from(ioe: io::Error) -> Self { - Failure::IOError(ioe.description().to_owned()) - } -} - -impl From<walkdir::Error> for Failure { - fn from(we: walkdir::Error) -> Self { - Failure::IOError(we.description().to_owned()) - } -} - -impl Debug for Failure { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - match self { - IOError(desc) => write!(f, "IO exception: {}", desc), - PathCannotBeEmpty => write!(f, "{}", "path cannot be empty"), - PathCannotContainLinks => write!(f, "{}", "path cannot contain links"), - PathCannotHaveNull => write!(f, "{}", "path cannot contain 'null' characters"), - PathMustBeAbsolute => write!(f, "{}", "path must be absolute"), - PathMustBeCanonical => write!(f, "{}", "path must be canonical"), - PathMustStartWithPrefix => write!(f, "{}", "path must start with configured prefix"), - FileNotFound => write!(f, "{}", "file not found"), - InvalidUserId => write!(f, "{}", "invalid user ID"), - InvalidGroupId => write!(f, "{}", "invalid group ID"), - ChmodFailed => write!(f, "{}", "'chmod' operation failed"), - ChownFailed => write!(f, "{}", "'chown' operation failed"), - } - } -} diff --git a/storage/permissions-fixer/src/main.rs b/storage/permissions-fixer/src/main.rs deleted file mode 100644 index 1121d651a7..0000000000 --- a/storage/permissions-fixer/src/main.rs +++ /dev/null @@ -1,41 +0,0 @@ -use clap::{App, Arg}; - -use crate::config::show_config; -use crate::errors::Failure; -use crate::errors::Failure::PathCannotBeEmpty; -use crate::path::apply_permissions; - -mod config; -mod errors; -mod path; - -fn main() -> Result<(), Failure> { - let args = App::new("nexus-fixer") - .version("1.0.0") - .author("BlueBrain Nexus ") - .about("Utility to fix permissions on files moved by the Nexus storage service.") - .arg( - Arg::with_name("PATH") - .help("The target path") - .required(true) - .conflicts_with("show-config") - .index(1), - ) - .arg( - Arg::with_name("show-config") - .short("s") - .long("show-config") - .help("Prints compile-time configuration") - .conflicts_with("PATH"), - ) - .get_matches(); - - if args.is_present("show-config") { - return show_config(); - } - - match args.value_of("PATH") { - Some(path) => apply_permissions(path).map(|_| println!("Done.")), - None => Err(PathCannotBeEmpty), - } -} diff --git a/storage/permissions-fixer/src/path.rs b/storage/permissions-fixer/src/path.rs deleted file mode 100644 index 08660e1374..0000000000 --- a/storage/permissions-fixer/src/path.rs +++ /dev/null @@ -1,266 +0,0 @@ -use std::convert::TryInto; -use std::ffi::CString; -use std::os::unix::ffi::OsStrExt; -use std::os::unix::fs::MetadataExt; -use std::path::Path; - -use libc::{chmod, chown}; -use walkdir::WalkDir; - -use crate::config::*; -use crate::errors::Failure; -use crate::errors::Failure::*; -use std::fs; -use std::os::unix::fs::PermissionsExt; -use std::fs::Metadata; - -pub fn apply_permissions(path: &str) -> Result<(), Failure> { - check_path(path).and_then(check_links).and_then(visit_all) -} - -fn check_path(path: &str) -> Result<&Path, Failure> { - let p = Path::new(path); - if p.is_relative() { - Err(PathMustBeAbsolute) - } else if !p.starts_with(get_path_prefix()) { - Err(PathMustStartWithPrefix) - } else if !p.exists() { - Err(FileNotFound) - } else if p.canonicalize()?
!= p.to_path_buf() { - Err(PathMustBeCanonical) - } else { - Ok(p) - } -} - -fn check_links(path: &Path) -> Result<&Path, Failure> { - let result: Result<Vec<bool>, Failure> = WalkDir::new(path) - .into_iter() - .map(|e| { - let entry = e?; - let meta = entry.metadata()?; - Ok(entry.path_is_symlink() || (meta.is_file() && meta.nlink() > 1)) - }) - .collect(); - result.and_then(|links| { - if links.into_iter().any(|b| b) { - Err(PathCannotContainLinks) - } else { - Ok(path) - } - }) -} - -fn add_parent_permissions(parent: &Path) -> Result<(), Failure> { - let mask = fetch_permissions(parent) | CHMOD_MASK_WX_GROUP; - set_permissions(parent, mask) -} - -fn visit_all(path: &Path) -> Result<(), Failure> { - let parent_result = path.parent().map_or(Ok(()), |p| add_parent_permissions(p).and_then(|_| set_group(p))); - let result: Result<Vec<()>, Failure> = WalkDir::new(path) - .into_iter() - .map(|e| { - let entry = e?; - let p = entry.path(); - if p.is_dir() { - set_owner(p).and_then(|_| set_permissions(p, DIR_MASK)) - } else { - set_owner(p).and_then(|_| set_permissions(p, FILE_MASK)) - } - }) - .collect(); - parent_result.and_then(|_| result.map(|_| ())) -} - -fn set_owner(path: &Path) -> Result<(), Failure> { - set_custom_owner(path, get_uid()?, get_gid()?) -} - -fn set_group(path: &Path) -> Result<(), Failure> { - let metadata = fetch_metadata(path); - set_custom_owner(path, metadata.uid(), get_gid()?) -} - -fn set_custom_owner(path: &Path, uid: u32, gid: u32) -> Result<(), Failure> { - let p = CString::new(path.as_os_str().as_bytes()).map_err(|_| PathCannotHaveNull)?; - let chown = unsafe { chown(p.as_ptr() as *const i8, uid, gid) }; - if chown == 0 { - Ok(()) - } else { - Err(ChownFailed) - } -} - -fn set_permissions(path: &Path, mask: u32) -> Result<(), Failure> { - let p = CString::new(path.as_os_str().as_bytes()).map_err(|_| PathCannotHaveNull)?; - let chmod = unsafe { chmod(p.as_ptr() as *const i8, mask.try_into().unwrap()) }; - if chmod == 0 { - Ok(()) - } else { - Err(ChmodFailed) - } -} - -fn fetch_permissions(path: &Path) -> u32 { - fetch_metadata(path).permissions().mode() -} - -fn fetch_metadata(path: &Path) -> Metadata { - fs::metadata(path).expect("failed to read file metadata") -} - -#[cfg(test)] -mod tests { - use std::io; - use std::os::unix::fs::{symlink, MetadataExt}; - - use rand::{thread_rng, Rng}; - - use super::*; - - #[test] - fn test_check_path() { - setup(); - assert_eq!(check_path("../foo"), Err(PathMustBeAbsolute)); - assert_eq!(check_path("/foo"), Err(PathMustStartWithPrefix)); - assert_eq!(check_path("/tmp/nexus-fixer/bar"), Err(FileNotFound)); - assert!(touch(Path::new("/tmp/nexus-fixer/bar")).is_ok()); - assert!(check_path("/tmp/nexus-fixer/bar").is_ok()); - assert_eq!( - check_path("/tmp/nexus-fixer/../nexus-fixer/bar"), - Err(PathMustBeCanonical) - ); - } - - #[test] - fn test_set_owner() { - setup(); - let p = Path::new("/tmp/nexus-fixer/baz"); - assert!(touch(p).is_ok()); - assert!(set_owner(p).is_ok()); - check_owner( - p, - get_uid().expect("failed to read UID"), - get_gid().expect("failed to read GID"), - ); - assert!(fs::remove_file(p).is_ok()); - } - - #[test] - fn test_set_group() { - setup(); - let p = Path::new("/tmp/nexus-fixer/batman"); - assert!(touch(p).is_ok()); - assert!(set_owner(p).is_ok()); - check_group( - p, - get_gid().expect("failed to read GID"), - ); - assert!(fs::remove_file(p).is_ok()); - } - - #[test] - fn test_set_permissions() { - setup(); - let p = Path::new("/tmp/nexus-fixer/qux"); - let mask = random_mask(); - assert!(touch(p).is_ok()); -
assert!(set_permissions(p, mask).is_ok()); - check_permissions(p, mask); - assert!(fs::remove_file(p).is_ok()); - } - - #[test] - fn test_check_links() { - setup(); - let p = Path::new("/tmp/nexus-fixer/d/e/f"); - assert!(fs::create_dir_all(p).is_ok()); - let file_a = Path::new("/tmp/nexus-fixer/d/file_a"); - let file_b = Path::new("/tmp/nexus-fixer/d/e/file_b"); - let file_c = Path::new("/tmp/nexus-fixer/d/e/f/file_c"); - assert!(touch(file_a).is_ok()); - assert!(touch(file_b).is_ok()); - assert!(touch(file_c).is_ok()); - - let toplevel = Path::new("/tmp/nexus-fixer/d"); - assert!(check_links(toplevel).is_ok()); - - let softlink = Path::new("/tmp/nexus-fixer/d/symlink"); - assert!(symlink(file_a, softlink).is_ok()); - assert_eq!(check_links(toplevel), Err(PathCannotContainLinks)); - assert!(fs::remove_file(softlink).is_ok()); - - let hardlink = Path::new("/tmp/nexus-fixer/d/hardlink"); - assert!(fs::hard_link(file_b, hardlink).is_ok()); - assert_eq!(check_links(toplevel), Err(PathCannotContainLinks)); - assert!(fs::remove_dir_all(toplevel).is_ok()); - } - - #[test] - fn test_visit_all() { - setup(); - let p = Path::new("/tmp/nexus-fixer/a/b/c"); - assert!(fs::create_dir_all(p).is_ok()); - let file_a = Path::new("/tmp/nexus-fixer/a/file_a"); - let file_b = Path::new("/tmp/nexus-fixer/a/b/file_b"); - let file_c = Path::new("/tmp/nexus-fixer/a/b/c/file_c"); - assert!(touch(file_a).is_ok()); - assert!(touch(file_b).is_ok()); - assert!(touch(file_c).is_ok()); - check_permissions(Path::new("/tmp/nexus-fixer/"), 0o755); - assert!(visit_all(Path::new("/tmp/nexus-fixer/a")).is_ok()); - - let uid = get_uid().expect("failed to read UID"); - let gid = get_gid().expect("failed to read GID"); - - // dirs - check_permissions(Path::new("/tmp/nexus-fixer/a"), DIR_MASK); - check_owner(Path::new("/tmp/nexus-fixer/a"), uid, gid); - check_permissions(Path::new("/tmp/nexus-fixer/a/b"), DIR_MASK); - check_owner(Path::new("/tmp/nexus-fixer/a/b"), uid, gid); - check_permissions(Path::new("/tmp/nexus-fixer/a/b/c"), DIR_MASK); - check_owner(Path::new("/tmp/nexus-fixer/a/b/c"), uid, gid); - check_permissions(Path::new("/tmp/nexus-fixer/"), 0o775); - - // files - check_permissions(file_a, FILE_MASK); - check_owner(file_a, uid, gid); - check_permissions(file_b, FILE_MASK); - check_owner(file_b, uid, gid); - check_permissions(file_c, FILE_MASK); - check_owner(file_c, uid, gid); - assert!(fs::remove_dir_all(Path::new("/tmp/nexus-fixer/a")).is_ok()); - } - - fn check_owner(path: &Path, uid: u32, gid: u32) { - let metadata = fetch_metadata(path); - assert_eq!(metadata.uid(), uid); - assert_eq!(metadata.gid(), gid); - } - - fn check_group(path: &Path, gid: u32) { - let metadata = fetch_metadata(path); - assert_eq!(metadata.gid(), gid); - } - - fn check_permissions(path: &Path, mask: u32) { - assert_eq!(fetch_permissions(path) & 0o777, mask); - } - - fn random_mask() -> u32 { - thread_rng().gen_range(0, 0o1000) - } - - // A simple implementation of `% touch path` (ignores existing files) - fn touch(path: &Path) -> io::Result<()> { - match fs::OpenOptions::new().create(true).write(true).open(path) { - Ok(_) => Ok(()), - Err(e) => Err(e), - } - } - - fn setup() { - assert!(fs::create_dir_all("/tmp/nexus-fixer").is_ok()); - } -} diff --git a/storage/permissions-fixer/tests/unit-tests-with-env.sh b/storage/permissions-fixer/tests/unit-tests-with-env.sh deleted file mode 100755 index ebdd5f4afe..0000000000 --- a/storage/permissions-fixer/tests/unit-tests-with-env.sh +++ /dev/null @@ -1,21 +0,0 @@ -#!/usr/bin/env bash -set -x -if 
[[ $EUID -ne 0 ]]; then - echo "Error: this script must be run as root" - exit 1 -fi - -WORKSPACE=$PWD -RND=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 16 | head -n 1) -TMP_DIR="/tmp/$RND" -mkdir $TMP_DIR -cp -a . $TMP_DIR -cd $TMP_DIR || exit 1 - -export NEXUS_PATH_PREFIX=/tmp/nexus-fixer -mkdir $NEXUS_PATH_PREFIX -cargo test - -rm -rf $NEXUS_PATH_PREFIX -rm -rf $TMP_DIR -cd $WORKSPACE || exit 1 diff --git a/storage/src/main/resources/akka.conf b/storage/src/main/resources/akka.conf deleted file mode 100644 index 10f80b7154..0000000000 --- a/storage/src/main/resources/akka.conf +++ /dev/null @@ -1,26 +0,0 @@ -akka { - - http { - server { - transparent-head-requests = off - parsing.max-content-length = 100g - request-timeout = 20 seconds - } - host-connection-pool { - max-connections = 16 - max-open-requests = 64 - } - sse { - # The maximum size for parsing server-sent events (96KiB). - max-event-size = 98304 - - # The maximum size for parsing lines of a server-sent event (48KiB). - max-line-size = 49152 - } - } - - loggers = ["akka.event.slf4j.Slf4jLogger"] - logging-filter = "akka.event.slf4j.Slf4jLoggingFilter" - log-dead-letters = off - loglevel = INFO -} diff --git a/storage/src/main/resources/app.conf b/storage/src/main/resources/app.conf deleted file mode 100644 index 76d1d3c168..0000000000 --- a/storage/src/main/resources/app.conf +++ /dev/null @@ -1,118 +0,0 @@ -# All application specific configuration should reside here -app { - # The service description namespace - description { - # The name of the service - name = "storage" - } - - # Service instance specific settings - instance { - # The default interface to bind to - interface = 127.0.0.1 - } - - # Http binding settings - http { - # The interface to bind to - interface = ${app.instance.interface} - # The port to bind to - port = 8080 - # The default uri prefix - prefix = "v1" - # The service public uri - public-uri = "http://"${app.http.interface}":"${app.http.port} - } - - # Service runtime settings - runtime { - # Arbitrary Future completion timeout - default-timeout = 30 seconds - } - - # Storage configuration - storage { - # the absolute path where the files are stored - root-volume = "/tmp" - # additional path prefixes from which it is allowed to link - extra-prefixes = [] - # the relative path of the protected directory once the storage bucket is selected - protected-directory = "nexus" - # permissions fixer - fixer-enabled = false - fixer-command = [] - # if atomic move (e.g. mv or rename) isn't supported, link using a copy and delete instead - link-with-atomic-move = false - } - - # Allows to define default media types for the given file extensions - media-type-detector { - extensions { - #extension = "application/custom" - } - } - - digest { - # the digest algorithm - algorithm = "SHA-256" - # the maximum number of digests stored in memory - max-in-memory = 10000 - # the maximum number of concurrent computations of digest - concurrent-computations = 4 - # the maximum number of computations in queue to be computed - max-in-queue = 10000 - # the amout of time after a digest which is still in the queue to be computed can be retrigger - retrigger-after = 30 minutes - } - - # Allowed subject to perform calls - authorization { - # flag to decide whether a token is expected or not to accept the incoming requests - # valid values: "anonymous" or "verify-token" - type = anonymous - # the user realm. 
It must be present when anonymous = false and it must be removed when anonymous = true - # issuer = "realm" - # the user name. It must be present when anonymous = false and it must be removed when anonymous = true - # subject = "username" - # the optional set of audiences of the realm - # audiences = [ ] - # Public JWK keys to validate the incoming token - # keys = [ "key" ] - } - - # monitoring config - monitoring { - # tracing settings - trace { - # Interval at which sampled finished spans will be flushed to SpanReporters. - tick-interval = 10 seconds - # Decides whether to include the stack trace of a Throwable as the "error.stacktrace" Span tag when a Span is marked - # as failed. - include-error-stacktrace = yes - # Configures a sampler that decides which Spans should be sent to Span reporters. The possible values are: - # - always: report all traces. - # - never: don't report any trace. - # - random: randomly decide using the probability defined in the random-sampler.probability setting. - # - adaptive: keeps dynamic samplers for each operation while trying to achieve a set throughput goal. - # - A FQCN of a kamon.trace.Sampler implementation to be used instead. The implementation must have a default - # constructor that will be used by Kamon when creating the instance. - # - sampler = "adaptive" - } - # jaeger settings - jaeger { - enabled = false - host = "localhost" - port = 14268 - # Protocol used to send data to Jaeger. The available options are: - # - http: Sends spans using jaeger.thrift over HTTP. Aimed to used with a Jaeger Collector. - # - https: Sends spans using jaeger.thrift over HTTPS. Aimed to used with a Jaeger Collector. - # - udp: Sends spans using jaeger.thrift compact over UDP. Aimed to used with a Jaeger Agent. - protocol = http - # for http and https, this is the full url to be used - http-url = ${app.monitoring.jaeger.protocol}"://"${app.monitoring.jaeger.host}":"${app.monitoring.jaeger.port}"/api/traces" - # Enable or disable including tags from kamon.environment as labels - include-environment-tags = no - } - } -} \ No newline at end of file diff --git a/storage/src/main/resources/application.conf b/storage/src/main/resources/application.conf deleted file mode 100644 index 915b9170a0..0000000000 --- a/storage/src/main/resources/application.conf +++ /dev/null @@ -1,3 +0,0 @@ -include "app.conf" -include "akka.conf" -include "kamon.conf" \ No newline at end of file diff --git a/storage/src/main/resources/kamon.conf b/storage/src/main/resources/kamon.conf deleted file mode 100644 index dde64e4dd9..0000000000 --- a/storage/src/main/resources/kamon.conf +++ /dev/null @@ -1,68 +0,0 @@ -kamon { - environment { - service = ${app.description.name} - } - trace { - tick-interval = ${app.monitoring.trace.tick-interval} - include-error-stacktrace = ${app.monitoring.trace.include-error-stacktrace} - sampler = ${app.monitoring.trace.sampler} - # An adaptive sampler tries to balance a global throughput goal across all operations in the current application, - # making the best possible effort to provide sampled traces of all operations. - # - adaptive-sampler { - - # The target maximum number of affirmative sample decisions to be taken by the sampler. The sampler will do a best - # effort to balance sampling decisions across all operations to produce no more than this number of affirmative - # decisions. - throughput = 600 - - # Groups allow to override the default balacing behavior for a particular subset of operations in this - # application. 
With groups, users can guarantee that certain operations will always be sampled, never sampled, or - # provide minimum and/or maximum sampled traces throughput goals. - # - # Groups have two properties: an operation name regex list which decides what operations get into the group and a - # set of rules for that group. The available rules are: - # - # sample: [always|never] Provides a definitive sampling decision for all operations in the group. - # When this rule is set, the sampler will always return this decision and any other - # configured rule will be ignored. - # - # minimum-throughput: [number] Defines the minimum number of sampled traces expected per minute for each - # operation in the group. Even though the sampler will do its best effort to provide the - # minimum number of sampled traces, the actual minimum will vary depending on application - # traffic and the global throughput goal. - # - # maximum-throughput: [number] Defines the maximum number of sampled traces expected per minute for each - # operation in the group, regardless of whether there is room left before meeting the global - # throughput goal. - # - # For example, if you wanted to ensure that health check operations are never sampled you could include - # - # health-checks { - # operations = ["GET \/status"] - # - # rules { - # sample = never - # } - # } - # - # - groups { - } - } - } - - modules { - jaeger { - enabled = ${app.monitoring.jaeger.enabled} - } - } - - jaeger { - host = ${app.monitoring.jaeger.host} - port = ${app.monitoring.jaeger.port} - protocol = ${app.monitoring.jaeger.protocol} - http-url = ${app.monitoring.jaeger.http-url} - include-environment-tags = ${app.monitoring.jaeger.include-environment-tags} - } -} \ No newline at end of file diff --git a/storage/src/main/resources/logback.xml b/storage/src/main/resources/logback.xml deleted file mode 100644 index 6ab8f42ce5..0000000000 --- a/storage/src/main/resources/logback.xml +++ /dev/null @@ -1,34 +0,0 @@ - - - - - - - %d{yyyy-MM-dd HH:mm:ss} %-5level %logger{36} - %msg%n - - - - - - - - %d{yyyy-MM-dd HH:mm:ss} %-5level [%traceToken] %logger{36} - %msg%n - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/File.scala b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/File.scala deleted file mode 100644 index b7e51c97ba..0000000000 --- a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/File.scala +++ /dev/null @@ -1,75 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage - -import akka.http.scaladsl.model.{ContentType, Uri} -import ch.epfl.bluebrain.nexus.storage.config.Contexts.resourceCtxIri -import ch.epfl.bluebrain.nexus.storage.jsonld.JsonLdContext.addContext -import io.circe.generic.extras.Configuration -import io.circe.generic.extras.semiauto._ -import io.circe.{Decoder, Encoder} - -import scala.util.Try - -// $COVERAGE-OFF$ -object File { - - implicit private val config: Configuration = Configuration.default - .copy(transformMemberNames = { - case "@context" => "@context" - case key => s"_$key" - }) - - /** - * Holds some of the metadata information related to a file. - * - * @param filename - * the original filename of the file - * @param mediaType - * the media type of the file - */ - final case class FileDescription(filename: String, mediaType: ContentType) - - /** - * Holds all the metadata information related to the file. 
- * - * @param location - * the file location - * @param bytes - * the size of the file file in bytes - * @param digest - * the digest information of the file - * @param mediaType - * the media type of the file - */ - final case class FileAttributes(location: Uri, bytes: Long, digest: Digest, mediaType: ContentType) - object FileAttributes { - import ch.epfl.bluebrain.nexus.delta.kernel.instances._ - - implicit final private val uriDecoder: Decoder[Uri] = - Decoder.decodeString.emapTry(s => Try(Uri(s))) - - implicit final private val uriEncoder: Encoder[Uri] = - Encoder.encodeString.contramap(_.toString()) - - implicit val fileAttrEncoder: Encoder[FileAttributes] = - deriveConfiguredEncoder[FileAttributes].mapJson(addContext(_, resourceCtxIri)) - implicit val fileAttrDecoder: Decoder[FileAttributes] = deriveConfiguredDecoder[FileAttributes] - } - - /** - * Digest related information of the file - * - * @param algorithm - * the algorithm used in order to compute the digest - * @param value - * the actual value of the digest of the file - */ - final case class Digest(algorithm: String, value: String) - - object Digest { - val empty: Digest = Digest("", "") - implicit val digestEncoder: Encoder[Digest] = deriveConfiguredEncoder[Digest] - implicit val digestDecoder: Decoder[Digest] = deriveConfiguredDecoder[Digest] - } - -} -// $COVERAGE-ON$ diff --git a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/JsonLdCirceSupport.scala b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/JsonLdCirceSupport.scala deleted file mode 100644 index e8e2c23223..0000000000 --- a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/JsonLdCirceSupport.scala +++ /dev/null @@ -1,100 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage - -import akka.http.scaladsl.marshalling.{Marshaller, ToEntityMarshaller} -import akka.http.scaladsl.model.MediaTypes.`application/json` -import akka.http.scaladsl.model.{ContentTypeRange, HttpEntity} -import ch.epfl.bluebrain.nexus.storage.JsonLdCirceSupport.{sortKeys, OrderedKeys} -import ch.epfl.bluebrain.nexus.storage.MediaTypes.`application/ld+json` -import de.heikoseeberger.akkahttpcirce.FailFastCirceSupport -import io.circe.syntax._ -import io.circe.{Encoder, Json, JsonObject, Printer} - -import scala.collection.immutable.Seq - -/** - * Json-LD specific akka http circe support. - * - * It uses [[`application/ld+json`]] as the default content type for encoding json trees into http request payloads. - */ -trait JsonLdCirceSupport extends FailFastCirceSupport { - - override def unmarshallerContentTypes: Seq[ContentTypeRange] = - List(`application/json`, `application/ld+json`) - - /** - * `A` => HTTP entity - * - * @tparam A - * type to encode - * @return - * marshaller for any `A` value - */ - implicit final def marshallerHttp[A](implicit - encoder: Encoder[A], - printer: Printer = Printer.noSpaces.copy(dropNullValues = true), - keys: OrderedKeys = OrderedKeys() - ): ToEntityMarshaller[A] = - jsonLdMarshaller.compose(encoder.apply) - - /** - * `Json` => HTTP entity - * - * @return - * marshaller for JSON-LD value - */ - implicit final def jsonLdMarshaller(implicit - printer: Printer = Printer.noSpaces.copy(dropNullValues = true), - keys: OrderedKeys = OrderedKeys() - ): ToEntityMarshaller[Json] = - Marshaller.withFixedContentType(`application/ld+json`) { json => - HttpEntity(`application/ld+json`, printer.print(sortKeys(json))) - } - -} - -object JsonLdCirceSupport extends JsonLdCirceSupport { - - /** - * Data type which holds the ordering for the JSON-LD keys. 
- * - * @param keys - * list of strings which defines the ordering for the JSON-LD keys - */ - final case class OrderedKeys(keys: List[String]) { - lazy val withPosition: Map[String, Int] = keys.zipWithIndex.toMap - } - object OrderedKeys { - - /** - * Construct an empty [[OrderedKeys]] - */ - final def apply(): OrderedKeys = new OrderedKeys(List("")) - } - - /** - * Order json keys according to the passed [[OrderedKeys]] - */ - def sortKeys(json: Json)(implicit keys: OrderedKeys): Json = { - - implicit val customStringOrdering: Ordering[String] = new Ordering[String] { - private val middlePos = keys.withPosition("") - - private def position(key: String): Int = keys.withPosition.getOrElse(key, middlePos) - - override def compare(x: String, y: String): Int = { - val posX = position(x) - val posY = position(y) - if (posX == middlePos && posY == middlePos) x compareTo y - else posX compareTo posY - } - } - - def canonicalJson(json: Json): Json = - json.arrayOrObject[Json](json, arr => Json.fromValues(arr.map(canonicalJson)), obj => sorted(obj).asJson) - - def sorted(jObj: JsonObject): JsonObject = - JsonObject.fromIterable(jObj.toVector.sortBy(_._1).map { case (k, v) => k -> canonicalJson(v) }) - - canonicalJson(json) - } -} diff --git a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/Main.scala b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/Main.scala deleted file mode 100644 index 2688434dc2..0000000000 --- a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/Main.scala +++ /dev/null @@ -1,112 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage - -import akka.actor.ActorSystem -import akka.event.{Logging, LoggingAdapter} -import akka.http.scaladsl.Http -import akka.http.scaladsl.server.Route -import akka.util.Timeout -import cats.effect.{ExitCode, IO, IOApp} -import ch.epfl.bluebrain.nexus.delta.kernel.utils.TransactionalFileCopier -import ch.epfl.bluebrain.nexus.storage.Storages.DiskStorage -import ch.epfl.bluebrain.nexus.storage.attributes.{AttributesCache, ContentTypeDetector} -import ch.epfl.bluebrain.nexus.storage.auth.AuthorizationMethod -import ch.epfl.bluebrain.nexus.storage.config.AppConfig._ -import ch.epfl.bluebrain.nexus.storage.config.{AppConfig, Settings} -import ch.epfl.bluebrain.nexus.storage.files.ValidateFile -import ch.epfl.bluebrain.nexus.storage.routes.Routes -import com.typesafe.config.{Config, ConfigFactory} -import kamon.Kamon - -import java.nio.file.Paths -import java.time.Clock -import scala.concurrent.duration._ -import scala.concurrent.{Await, ExecutionContext, Future} -import scala.util.{Failure, Success} - -//noinspection TypeAnnotation -// $COVERAGE-OFF$ -object Main extends IOApp { - - def loadConfig(): Config = { - val cfg = sys.env.get("STORAGE_CONFIG_FILE") orElse sys.props.get("storage.config.file") map { str => - val file = Paths.get(str).toAbsolutePath.toFile - ConfigFactory.parseFile(file) - } getOrElse ConfigFactory.empty() - (cfg withFallback ConfigFactory.load()).resolve() - } - - def setupMonitoring(config: Config): Unit = { - if (sys.env.getOrElse("KAMON_ENABLED", "false").toBoolean) { - Kamon.init(config) - } - } - - def shutdownMonitoring(): Unit = { - if (sys.env.getOrElse("KAMON_ENABLED", "false").toBoolean) { - Await.result(Kamon.stopModules(), 10.seconds) - } - } - - @SuppressWarnings(Array("UnusedMethodParameter")) - override def run(args: List[String]): IO[ExitCode] = { - val config = loadConfig() - setupMonitoring(config) - - implicit val appConfig: AppConfig = Settings(config).appConfig - - implicit val as: ActorSystem 
= ActorSystem(appConfig.description.fullName, config) - implicit val ec: ExecutionContext = as.dispatcher - implicit val authorizationMethod: AuthorizationMethod = appConfig.authorization - implicit val timeout = Timeout(1.minute) - implicit val clock = Clock.systemUTC - implicit val contentTypeDetector = new ContentTypeDetector(appConfig.mediaTypeDetector) - - val attributesCache: AttributesCache = AttributesCache[AkkaSource] - val validateFile: ValidateFile = ValidateFile.mk(appConfig.storage) - val copyFiles: TransactionalFileCopier = TransactionalFileCopier.mk() - - val storages: Storages[AkkaSource] = - new DiskStorage( - appConfig.storage, - contentTypeDetector, - appConfig.digest, - attributesCache, - validateFile, - copyFiles - ) - - val logger: LoggingAdapter = Logging(as, getClass) - - logger.info("==== Cluster is Live ====") - - if (authorizationMethod == AuthorizationMethod.Anonymous) { - logger.warning("The application has been configured with anonymous, the caller will not be verified !") - } - - logger.info(s"==== Full configuration is $appConfig ====") - - val routes: Route = Routes(storages) - - val httpBinding: Future[Http.ServerBinding] = { - Http().newServerAt(appConfig.http.interface, appConfig.http.port).bind(routes) - } - httpBinding onComplete { - case Success(binding) => - logger.info(s"Bound to ${binding.localAddress.getHostString}: ${binding.localAddress.getPort}") - case Failure(th) => - logger.error(th, "Failed to perform an http binding on {}:{}", appConfig.http.interface, appConfig.http.port) - Await.result(as.terminate(), 10.seconds) - } - - as.registerOnTermination { - shutdownMonitoring() - } - // attempt to leave the cluster before shutting down - val _ = sys.addShutdownHook { - Await.result(as.terminate().map(_ => ()), 10.seconds) - } - - IO.never.as(ExitCode.Success) - } -} -// $COVERAGE-ON$ diff --git a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/MediaTypes.scala b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/MediaTypes.scala deleted file mode 100644 index 90886fda16..0000000000 --- a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/MediaTypes.scala +++ /dev/null @@ -1,9 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage - -import akka.http.scaladsl.model.MediaType -import akka.http.scaladsl.model.HttpCharsets.`UTF-8` - -object MediaTypes { - final val `application/ld+json`: MediaType.WithFixedCharset = - MediaType.applicationWithFixedCharset("ld+json", `UTF-8`, "jsonld") -} diff --git a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/Rejection.scala b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/Rejection.scala deleted file mode 100644 index 11a5cc5eb0..0000000000 --- a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/Rejection.scala +++ /dev/null @@ -1,81 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage - -import akka.http.scaladsl.model.StatusCodes -import akka.http.scaladsl.model.Uri.Path -import akka.http.scaladsl.server.{Rejection => AkkaRejection} -import ch.epfl.bluebrain.nexus.storage.routes.StatusFrom -import io.circe.generic.extras.Configuration -import io.circe.generic.extras.semiauto.deriveConfiguredEncoder -import io.circe.{Encoder, Json} - -/** - * Enumeration of resource rejection types. - * - * @param msg - * a descriptive message of the rejection - */ -sealed abstract class Rejection(val msg: String) extends AkkaRejection with Product with Serializable - -object Rejection { - - /** - * Signals an attempt to interact with a bucket that doesn't exist. 
- * - * @param name - * the storage bucket name - */ - final case class BucketNotFound(name: String) extends Rejection(s"The provided bucket '$name' does not exist.") - - /** - * Signals an attempt to override a path that already exists. - * - * @param name - * the storage bucket name - * @param path - * the path to the file - */ - final case class PathAlreadyExists(name: String, path: Path) - extends Rejection( - s"The provided location inside the bucket '$name' with the path '$path' already exists." - ) - - /** - * Signals an attempt to interact with a path that doesn't exist. - * - * @param name - * the storage bucket name - * @param path - * the path to the file - */ - final case class PathNotFound(name: String, path: Path) - extends Rejection( - s"The provided location inside the bucket '$name' with the path '$path' does not exist." - ) - - /** - * Signals that the location contains symbolic or hard links. - * - * @param name - * the storage bucket name - * @param path - * the path to the file - */ - final case class PathContainsLinks(name: String, path: Path) - extends Rejection( - s"The provided location inside the bucket '$name' with the path '$path' contains links. Please remove them in order to proceed with this call." - ) - - implicit def statusCodeFrom: StatusFrom[Rejection] = - StatusFrom { - case _: PathContainsLinks => StatusCodes.BadRequest - case _: PathAlreadyExists => StatusCodes.Conflict - case _: BucketNotFound => StatusCodes.NotFound - case _: PathNotFound => StatusCodes.NotFound - } - - implicit val rejectionEncoder: Encoder[Rejection] = { - implicit val rejectionConfig: Configuration = Configuration.default.withDiscriminator("@type") - val enc = deriveConfiguredEncoder[Rejection].mapJson(jsonError) - Encoder.instance(r => enc(r) deepMerge Json.obj("reason" -> Json.fromString(r.msg))) - } -} diff --git a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/StorageError.scala b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/StorageError.scala deleted file mode 100644 index 2bd4578b1d..0000000000 --- a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/StorageError.scala +++ /dev/null @@ -1,110 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage - -import akka.http.scaladsl.model.StatusCodes -import akka.http.scaladsl.model.Uri.Path -import ch.epfl.bluebrain.nexus.storage.routes.StatusFrom -import io.circe.generic.extras.Configuration -import io.circe.generic.extras.semiauto.deriveConfiguredEncoder -import io.circe.{Encoder, Json} - -/** - * Enumeration of runtime errors. - * - * @param msg - * a description of the error - */ - -sealed abstract class StorageError(val msg: String) extends Exception with Product with Serializable { - override def fillInStackTrace(): StorageError = this - override def getMessage: String = msg -} - -object StorageError { - - /** - * Generic wrapper for kg errors that should not be exposed to clients. - * - * @param reason - * the underlying error reason - */ - final case class InternalError(reason: String) extends StorageError(reason) - - /** - * Signals that the provided authentication is not valid. - */ - final case object AuthenticationFailed extends StorageError("The supplied authentication is invalid.") - - /** - * Signals that the caller doesn't have access to the selected resource. - */ - final case object AuthorizationFailed - extends StorageError("The supplied authentication is not authorized to access this resource.") - - /** - * Signals the inability to connect to an underlying service to perform a request. 
- * - * @param msg - * a human readable description of the cause - */ - final case class DownstreamServiceError(override val msg: String) extends StorageError(msg) - - /** - * Signals an attempt to interact with a path that doesn't exist. - * - * @param name - * the storage bucket name - * @param path - * the path to the file - */ - final case class PathNotFound(name: String, path: Path) - extends StorageError( - s"The provided location inside the bucket '$name' with the path '$path' does not exist." - ) - - /** - * Signals an attempt to interact with a path that is invalid. - * - * @param name - * the storage bucket name - * @param path - * the path to the file - */ - final case class PathInvalid(name: String, path: Path) - extends StorageError( - s"The provided location inside the bucket '$name' with the path '$path' is invalid." - ) - - /** - * Signals that the system call to the 'nexus-fixer' binary failed. - * - * @param path - * the absolute path to the file - * @param message - * the error message returned by the system call - */ - final case class PermissionsFixingFailed(path: String, message: String) - extends StorageError(s"Fixing permissions on the path '$path' failed with an error: $message") - - /** - * Signals an internal timeout. - * - * @param msg - * a descriptive message on the operation that timed out - */ - final case class OperationTimedOut(override val msg: String) extends StorageError(msg) - - implicit private val config: Configuration = Configuration.default.withDiscriminator("@type") - - private val derivedEncoder = deriveConfiguredEncoder[StorageError].mapJson(jsonError) - - implicit val storageErrorEncoder: Encoder[StorageError] = - Encoder.instance(r => derivedEncoder(r) deepMerge Json.obj("reason" -> Json.fromString(r.msg))) - - implicit val storageErrorStatusFrom: StatusFrom[StorageError] = { - case _: PathNotFound => StatusCodes.NotFound - case _: PathInvalid => StatusCodes.BadRequest - case AuthenticationFailed => StatusCodes.Unauthorized - case AuthorizationFailed => StatusCodes.Forbidden - case _ => StatusCodes.InternalServerError - } -} diff --git a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/Storages.scala b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/Storages.scala deleted file mode 100644 index f0d46e822c..0000000000 --- a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/Storages.scala +++ /dev/null @@ -1,340 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage - -import akka.http.scaladsl.model.Uri -import akka.stream.Materializer -import akka.stream.alpakka.file.scaladsl.Directory -import akka.stream.scaladsl.{FileIO, Keep} -import cats.data.{EitherT, NonEmptyList} -import cats.effect.IO -import ch.epfl.bluebrain.nexus.delta.kernel.Logger -import ch.epfl.bluebrain.nexus.delta.kernel.utils.{CopyBetween, TransactionalFileCopier} -import ch.epfl.bluebrain.nexus.storage.File._ -import ch.epfl.bluebrain.nexus.storage.Rejection.PathNotFound -import ch.epfl.bluebrain.nexus.storage.StorageError.{InternalError, PermissionsFixingFailed} -import ch.epfl.bluebrain.nexus.storage.Storages.BucketExistence._ -import ch.epfl.bluebrain.nexus.storage.Storages.PathExistence._ -import ch.epfl.bluebrain.nexus.storage.Storages.{BucketExistence, PathExistence} -import ch.epfl.bluebrain.nexus.storage.attributes.AttributesComputation._ -import ch.epfl.bluebrain.nexus.storage.attributes.{AttributesCache, ContentTypeDetector} -import ch.epfl.bluebrain.nexus.storage.config.AppConfig.{DigestConfig, StorageConfig} -import 
ch.epfl.bluebrain.nexus.storage.files.{CopyFileOutput, ValidateFile} -import ch.epfl.bluebrain.nexus.storage.routes.CopyFile -import org.apache.commons.io.FileUtils - -import java.nio.file.StandardCopyOption._ -import java.nio.file.{Files, Path} -import java.security.MessageDigest -import scala.concurrent.{ExecutionContext, Future} -import scala.sys.process._ - -trait Storages[Source] { - - /** - * Checks that the provided bucket name exists and it is readable/writable. - * - * @param name - * the storage bucket name - */ - def exists(name: String): BucketExistence - - /** - * Check whether the provided path already exists. - * - * @param name - * the storage bucket name - * @param path - * the path location - */ - def pathExists(name: String, path: Uri.Path): PathExistence - - /** - * Creates a file with the provided ''metadata'' and ''source'' on the provided ''filePath''. - * - * @param name - * the storage bucket name - * @param path - * the path location - * @param source - * the file content - * @return - * The file attributes containing the metadata (bytes and location) - */ - def createFile( - name: String, - path: Uri.Path, - source: Source - )(implicit bucketEv: BucketExists, pathEv: PathDoesNotExist): IO[FileAttributes] - - /** - * Copy files between locations inside the nexus folder. Attributes are neither recomputed nor fetched; it's assumed - * clients already have this information from the source files. - * - * @param name - * the storage bucket name - * @param files - * a list of source/destination files. The source files should exist under the nexus folder, and the destination - * files will be created there. Both must be files, not directories. - */ - def copyFiles( - name: String, - files: NonEmptyList[CopyFile] - )(implicit bucketEv: BucketExists, pathEv: PathDoesNotExist): IO[RejOr[NonEmptyList[CopyFileOutput]]] - - /** - * Moves a path from the provided ''sourcePath'' to ''destPath'' inside the nexus folder. - * - * @param name - * the storage bucket name - * @param sourcePath - * the source path location - * @param destPath - * the destination path location inside the nexus folder - * @return - * Left(rejection) or Right(fileAttributes). The file attributes contain the metadata (bytes and location) - */ - def moveFile( - name: String, - sourcePath: Uri.Path, - destPath: Uri.Path - )(implicit bucketEv: BucketExists): IO[RejOrAttributes] - - /** - * Retrieves the file as a Source. - * - * @param name - * the storage bucket name - * @param path - * the path to the file location - * @return - * Left(rejection), Right(source, Some(filename)) when the path is a file and Right(source, None) when the path is - * a directory - */ - def getFile( - name: String, - path: Uri.Path - )(implicit bucketEv: BucketExists, pathEv: PathExists): RejOr[(Source, Option[String])] - - /** - * Retrieves the attributes of the file. 
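// ---- [Editorial sketch, not part of the deleted sources] ----
// Callers of the `Storages` trait above must first produce the `BucketExists` /
// `PathDoesNotExist` evidence values and pass them implicitly; a hypothetical create flow
// (bucket name and path are example values):
import akka.http.scaladsl.model.Uri
import cats.effect.IO
import ch.epfl.bluebrain.nexus.storage.File.FileAttributes
import ch.epfl.bluebrain.nexus.storage.Storages.BucketExistence.BucketExists
import ch.epfl.bluebrain.nexus.storage.Storages.PathExistence.PathDoesNotExist
import ch.epfl.bluebrain.nexus.storage.{AkkaSource, Storages}

def createIfAbsent(storages: Storages[AkkaSource], source: AkkaSource): IO[FileAttributes] =
  storages.exists("my-bucket") match {
    case BucketExists =>
      implicit val bucketEv: BucketExists = BucketExists
      storages.pathExists("my-bucket", Uri.Path("dir/file.txt")) match {
        case PathDoesNotExist =>
          implicit val pathEv: PathDoesNotExist = PathDoesNotExist
          storages.createFile("my-bucket", Uri.Path("dir/file.txt"), source)
        case _ => IO.raiseError(new IllegalArgumentException("path already exists"))
      }
    case _ => IO.raiseError(new IllegalArgumentException("bucket not found"))
  }
// ---- end of editorial sketch ----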
- * - * @param name - * the storage bucket name - * @param path - * the path to the file location - */ - def getAttributes( - name: String, - path: Uri.Path - )(implicit bucketEv: BucketExists, pathEv: PathExists): IO[FileAttributes] - -} - -object Storages { - - sealed trait BucketExistence { - def exists: Boolean - } - sealed trait PathExistence { - def exists: Boolean - } - - object BucketExistence { - final case object BucketExists extends BucketExistence { - val exists = true - } - final case object BucketDoesNotExist extends BucketExistence { - val exists = false - } - type BucketExists = BucketExists.type - type BucketDoesNotExist = BucketDoesNotExist.type - } - - object PathExistence { - final case object PathExists extends PathExistence { - val exists = true - } - final case object PathDoesNotExist extends PathExistence { - val exists = false - } - type PathExists = PathExists.type - type PathDoesNotExist = PathDoesNotExist.type - } - - final class DiskStorage( - config: StorageConfig, - contentTypeDetector: ContentTypeDetector, - digestConfig: DigestConfig, - cache: AttributesCache, - validateFile: ValidateFile, - copyFiles: TransactionalFileCopier - )(implicit - ec: ExecutionContext, - mt: Materializer - ) extends Storages[AkkaSource] { - - private val log = Logger[DiskStorage] - private val linkWithAtomicMove = config.linkWithAtomicMove.getOrElse(true) - - private def logUnsafe(msg: String): Unit = { - import cats.effect.unsafe.implicits.global - log.info(msg).unsafeRunSync() - } - - def exists(name: String): BucketExistence = { - val path = basePath(config, name) - logUnsafe(s"Checking bucket existence at path $path") - if (path.getParent.getParent != config.rootVolume) { - logUnsafe(s"Invalid bucket because the root volume is not two directories above $path") - BucketDoesNotExist - } else if (Files.isDirectory(path) && Files.isReadable(path)) BucketExists - else { - logUnsafe(s"Invalid bucket because $path is not a readable directory") - BucketDoesNotExist - } - } - - def pathExists(name: String, path: Uri.Path): PathExistence = { - val absPath = filePath(config, name, path) - if (Files.exists(absPath) && Files.isReadable(absPath) && descendantOf(absPath, basePath(config, name))) - PathExists - else { - logUnsafe(s"Invalid absolute path $absPath for bucket $name and relative path $path") - PathDoesNotExist - } - } - - def createFile( - name: String, - path: Uri.Path, - source: AkkaSource - )(implicit bucketEv: BucketExists, pathEv: PathDoesNotExist): IO[FileAttributes] = - for { - validated <- validateFile.forCreate(name, path) - _ <- log.info(s"Creating file in bucket $name at path $path") - _ <- IO.blocking(Files.createDirectories(validated.absDestPath.getParent)) - msgDigest <- IO.delay(MessageDigest.getInstance(digestConfig.algorithm)) - attributes <- streamFileContents(source, path, validated.absDestPath, msgDigest) - } yield attributes - - private def streamFileContents( - source: AkkaSource, - path: Uri.Path, - absFilePath: Path, - msgDigest: MessageDigest - ): IO[FileAttributes] = { - IO.fromFuture { - IO.delay { - source - .alsoToMat(sinkDigest(msgDigest))(Keep.right) - .toMat(FileIO.toPath(absFilePath)) { case (digFuture, ioFuture) => - digFuture.zipWith(ioFuture) { - case (digest, io) if absFilePath.toFile.exists() => - Future(FileAttributes(absFilePath.toAkkaUri, io.count, digest, contentTypeDetector(absFilePath))) - case _ => - Future.failed(InternalError(s"I/O error writing file to path '$path'")) - } - } - .run() - .flatten - } - } - } - - def moveFile( - name: 
String, - sourcePath: Uri.Path, - destPath: Uri.Path - )(implicit bucketEv: BucketExists): IO[Either[Rejection, FileAttributes]] = (for { - value <- EitherT(validateFile.forMoveIntoProtectedDir(name, sourcePath, destPath)) - attr <- EitherT.right[Rejection](fixPermissionsAndCopy(value.absSourcePath, value.absDestPath, value.isDir)) - } yield attr).value - - private def fixPermissionsAndCopy(absSourcePath: Path, absDestPath: Path, isDir: Boolean) = - fixPermissions(absSourcePath) >> - computeSizeAndMoveFile(absSourcePath, absDestPath, isDir) - - private def fixPermissions(path: Path): IO[Unit] = - if (config.fixerEnabled) { - val absPath = path.toAbsolutePath.normalize.toString - val logger = StringProcessLogger(config.fixerCommand, absPath) - val process = Process(config.fixerCommand :+ absPath) - - for { - _ <- log.info(s"Fixing permissions for file at $absPath") - exitCode <- IO.blocking(process ! logger) - _ <- IO.raiseUnless(exitCode == 0)(PermissionsFixingFailed(absPath, logger.toString)) - } yield () - } else log.info(s"Not changing permissions for file at $path") - - private def computeSizeAndMoveFile( - absSourcePath: Path, - absDestPath: Path, - isDir: Boolean - ): IO[FileAttributes] = - for { - computedSize <- size(absSourcePath) - msg = if (linkWithAtomicMove) "atomic move" else "copy and delete" - _ <- log.info(s"Performing link with $msg from $absSourcePath to $absDestPath") - _ <- if (linkWithAtomicMove) doMove(absSourcePath, absDestPath) - else doCopyAndDelete(absSourcePath, absDestPath, isDir) - _ <- IO.delay(cache.asyncComputePut(absDestPath, digestConfig.algorithm)) - mediaType <- IO.blocking(contentTypeDetector(absDestPath, isDir)) - } yield FileAttributes(absDestPath.toAkkaUri, computedSize, Digest.empty, mediaType) - - private def doMove(absSourcePath: Path, absDestPath: Path): IO[Unit] = - IO.blocking(Files.createDirectories(absDestPath.getParent)) >> - IO.blocking(Files.move(absSourcePath, absDestPath, ATOMIC_MOVE)).void - - private def doCopyAndDelete(absSourcePath: Path, absDestPath: Path, isDir: Boolean): IO[Unit] = - if (isDir) - IO.blocking(FileUtils.copyDirectory(absSourcePath.toFile, absDestPath.toFile)) >> - IO.blocking(FileUtils.deleteDirectory(absSourcePath.toFile)) - else - copyFiles.copyAll(NonEmptyList.of(CopyBetween.mk(absSourcePath, absDestPath))) >> - IO.blocking(Files.delete(absSourcePath)) - - private def size(absPath: Path): IO[Long] = - if (Files.isDirectory(absPath)) { - IO.fromFuture(IO.delay(Directory.walk(absPath).filter(Files.isRegularFile(_)).runFold(0L)(_ + Files.size(_)))) - } else if (Files.isRegularFile(absPath)) - IO.blocking(Files.size(absPath)) - else - IO.raiseError(InternalError(s"Path '$absPath' is not a file nor a directory")) - - def copyFiles( - destBucket: String, - files: NonEmptyList[CopyFile] - )(implicit bucketEv: BucketExists, pathEv: PathDoesNotExist): IO[RejOr[NonEmptyList[CopyFileOutput]]] = - (for { - validated <- - files.traverse(f => - EitherT(validateFile.forCopyWithinProtectedDir(f.sourceBucket, destBucket, f.source, f.destination)) - ) - copyBetween = validated.map(v => CopyBetween.mk(v.absSourcePath, v.absDestPath)) - _ <- EitherT.right[Rejection](copyFiles.copyAll(copyBetween)) - } yield files.zip(validated).map { case (raw, valid) => - CopyFileOutput(raw.source, raw.destination, valid.absSourcePath, valid.absDestPath) - }).value - - def getFile( - name: String, - path: Uri.Path - )(implicit bucketEv: BucketExists, pathEv: PathExists): RejOr[(AkkaSource, Option[String])] = { - val absPath = filePath(config, 
name, path) - if (Files.isRegularFile(absPath)) Right(fileSource(absPath) -> Some(absPath.getFileName.toString)) - else if (Files.isDirectory(absPath)) Right(folderSource(absPath) -> None) - else { - logUnsafe(s"Invalid absolute path $absPath for bucket $name and relative path $path") - Left(PathNotFound(name, path)) - } - } - - def getAttributes( - name: String, - path: Uri.Path - )(implicit bucketEv: BucketExists, pathEv: PathExists): IO[FileAttributes] = - cache.get(filePath(config, name, path)) - - } - -} diff --git a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/StringProcessLogger.scala b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/StringProcessLogger.scala deleted file mode 100644 index 272f87df2c..0000000000 --- a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/StringProcessLogger.scala +++ /dev/null @@ -1,51 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage - -import org.slf4j.LoggerFactory - -import scala.sys.process.ProcessLogger - -/** - * Simple [[scala.sys.process.ProcessLogger]] implementation backed by a [[StringBuilder]]. - * - * @param cmd - * the command, used for logging purposes - * @param arg - * an optional command argument - * @note - * This expects a brief, single-line output. - */ -class StringProcessLogger(cmd: Seq[String], arg: Option[String]) extends ProcessLogger { - - private val logger = LoggerFactory.getLogger(cmd.mkString(" ")) - - private val builder = new StringBuilder - - override def out(s: => String): Unit = { - builder.append(s) - logger.debug(format(s, arg)) - } - - override def err(s: => String): Unit = { - builder.append(s) - logger.error(format(s, arg)) - } - - override def buffer[T](f: => T): T = f - - override def toString: String = builder.toString - - private def format(s: String, arg: Option[String]): String = - arg match { - case Some(a) => s"$a $s" - case None => s - } - -} - -object StringProcessLogger { - - def apply(cmd: Seq[String], arg: String): StringProcessLogger = new StringProcessLogger(cmd, Some(arg)) - - def apply(cmd: Seq[String]): StringProcessLogger = new StringProcessLogger(cmd, None) - -} diff --git a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/TarFlow.scala b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/TarFlow.scala deleted file mode 100644 index 84285fd3a7..0000000000 --- a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/TarFlow.scala +++ /dev/null @@ -1,76 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage -import java.nio.charset.StandardCharsets.UTF_8 -import java.nio.file.{Files, Path} - -import akka.NotUsed -import akka.stream.scaladsl.{FileIO, Flow, Source} -import akka.util.ByteString -import org.apache.commons.compress.archivers.tar.{TarArchiveEntry, TarConstants} - -/** - * Akka stream flows for writing tar balls. 
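// ---- [Editorial sketch, not part of the deleted sources] ----
// `StringProcessLogger` above buffers the (expected single-line) output of an external
// command while forwarding it to SLF4J. The permissions fixer in `DiskStorage` drives it
// through scala.sys.process, roughly like this (command and path are example values):
import scala.sys.process._
import ch.epfl.bluebrain.nexus.storage.StringProcessLogger

val cmd      = Vector("sudo", "nexus-fixer")   // cf. StorageConfig.fixerCommand
val arg      = "/nexus/my-bucket/file.txt"
val plogger  = StringProcessLogger(cmd, arg)
val exitCode = Process(cmd :+ arg) ! plogger   // non-zero exit surfaces as PermissionsFixingFailed
val captured = plogger.toString                // buffered stdout/stderr, used in the error message
// ---- end of editorial sketch ----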
- */ -object TarFlow { - private val recordSize: Int = 512 - private val eofBlockSize: Int = recordSize * 2 - - private val terminalChunk = - ByteString.newBuilder.putBytes(Array.ofDim[Byte](eofBlockSize)).result() - - private def headerBytes(basePath: Path, path: Path): ByteString = { - val header = new TarArchiveEntry(path.toFile, basePath.getParent.relativize(path).toString) - val buffer = Array.ofDim[Byte](recordSize) - val builder = ByteString.newBuilder - def appendHeader(header: TarArchiveEntry): Unit = { - header.writeEntryHeader(buffer) - builder ++= buffer - } - - def padToBoundary(): Unit = { - val mod = builder.length % recordSize - if (mod != 0) builder ++= List.fill[Byte](recordSize - mod)(0) - } - - val nameAsBytes = header.getName.getBytes(UTF_8) - if (nameAsBytes.length > TarConstants.NAMELEN) { - val longNameHeader = new TarArchiveEntry(TarConstants.GNU_LONGLINK, TarConstants.LF_GNUTYPE_LONGNAME) - longNameHeader.setSize(nameAsBytes.length.toLong + 1L) // +1 for null - appendHeader(longNameHeader) - builder ++= nameAsBytes - builder += 0 - padToBoundary() - } - appendHeader(header) - padToBoundary() - builder.result() - } - - private def padToBoundary(path: Path): ByteString = { - val mod = (Files.size(path) % recordSize).toInt - if (mod == 0) ByteString.empty - else ByteString(Array.fill[Byte](recordSize - mod)(0)) - } - - /** - * Creates a Flow which deals with [[Path]] s - * 1. Creates the ByteString value for the [[Path]] Tar Header and wraps it in a Source 2. Creates a Source with - * the ByteString content of the [[Path]] 3. Creates a ByteString with padding (0s) to fill the previous Source, - * when needed The sources are concatenated: 1 --> 2 --> 3 - * - * @param basePath - * the base directory from where to create the tarball - * @return - * a Flow where the input is a [[Path]] and the output is a [[ByteString]] - */ - def writer(basePath: Path): Flow[Path, ByteString, NotUsed] = - Flow[Path] - .flatMapConcat { - case path if Files.isRegularFile(path) => - val headerSource = Source.single(headerBytes(basePath, path)) - val paddingSource = Source.single(padToBoundary(path)) - headerSource.concat(FileIO.fromPath(path)).concat(paddingSource) - case path => - Source.single(headerBytes(basePath, path)) - } - .concat(Source.single(terminalChunk)) -} diff --git a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/UriUtils.scala b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/UriUtils.scala deleted file mode 100644 index 068c7064de..0000000000 --- a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/UriUtils.scala +++ /dev/null @@ -1,25 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage - -import akka.http.scaladsl.model.Uri - -object UriUtils { - - /** - * Adds a segment to the end of the Uri - */ - def addPath(uri: Uri, segment: String): Uri = { - if (segment.trim.isEmpty) uri - else { - val segmentStartsWithSlash = segment.startsWith("/") - val uriEndsWithSlash = uri.path.endsWithSlash - if (uriEndsWithSlash && segmentStartsWithSlash) - uri.copy(path = uri.path + segment.drop(1)) - else if (uriEndsWithSlash) - uri.copy(path = uri.path + segment) - else if (segmentStartsWithSlash) - uri.copy(path = uri.path / segment.drop(1)) - else - uri.copy(path = uri.path / segment) - } - } -} diff --git a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/attributes/AttributesCache.scala b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/attributes/AttributesCache.scala deleted file mode 100644 index 1856e83cd8..0000000000 --- 
a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/attributes/AttributesCache.scala +++ /dev/null @@ -1,80 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage.attributes - -import akka.actor.{ActorRef, ActorSystem} -import akka.pattern.{ask, AskTimeoutException} -import akka.util.Timeout -import cats.effect.IO -import ch.epfl.bluebrain.nexus.delta.kernel.Logger -import ch.epfl.bluebrain.nexus.storage.File.FileAttributes -import ch.epfl.bluebrain.nexus.storage.StorageError.{InternalError, OperationTimedOut} -import ch.epfl.bluebrain.nexus.storage.attributes.AttributesCacheActor.Protocol._ -import ch.epfl.bluebrain.nexus.storage.config.AppConfig.DigestConfig - -import java.nio.file.Path -import java.time.Clock -import scala.util.control.NonFatal - -trait AttributesCache { - - /** - * Fetches the file attributes for the provided absFilePath. If the digest is being computed or is going to be - * computed, a Digest.empty is returned - * - * @param filePath - * the absolute file path - * @return - * the file attributes wrapped in the effect type F - */ - def get(filePath: Path): IO[FileAttributes] - - /** - * Computes the file attributes and stores them asynchronously on the cache - * - * @param filePath - * the absolute file path - * @param algorithm - * the digest algorithm - */ - def asyncComputePut(filePath: Path, algorithm: String): Unit -} - -object AttributesCache { - - private val logger = Logger[this.type] - - def apply[Source](implicit - system: ActorSystem, - clock: Clock, - tm: Timeout, - computation: AttributesComputation[Source], - config: DigestConfig - ): AttributesCache = - apply(system.actorOf(AttributesCacheActor.props(computation))) - - private[attributes] def apply[F[_]]( - underlying: ActorRef - )(implicit tm: Timeout): AttributesCache = - new AttributesCache { - override def get(filePath: Path): IO[FileAttributes] = - IO.fromFuture(IO.delay(underlying ? Get(filePath))) - .flatMap[FileAttributes] { - case attributes: FileAttributes => IO.pure(attributes) - case other => - logger.error(s"Received unexpected reply from the file attributes cache: '$other'") >> - IO.raiseError(InternalError("Unexpected reply from the file attributes cache")) - } - .recoverWith { - case _: AskTimeoutException => - IO.raiseError(OperationTimedOut("reply from the file attributes cache timed out")) - case NonFatal(th) => - logger.error(th)("Exception caught while exchanging messages with the file attributes cache") >> - IO.raiseError( - InternalError("Exception caught while exchanging messages with the file attributes cache") - ) - } - - override def asyncComputePut(filePath: Path, algorithm: String): Unit = - underlying ! 
Compute(filePath) - - } -} diff --git a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/attributes/AttributesCacheActor.scala b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/attributes/AttributesCacheActor.scala deleted file mode 100644 index 771a55d8cf..0000000000 --- a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/attributes/AttributesCacheActor.scala +++ /dev/null @@ -1,158 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage.attributes - -import akka.NotUsed -import akka.actor.{Actor, ActorLogging, ActorSystem, Props} -import akka.http.scaladsl.model.MediaTypes.`application/octet-stream` -import akka.stream.javadsl.Sink -import akka.stream.scaladsl.{Flow, Keep, Source} -import akka.stream.{OverflowStrategy, QueueOfferResult} -import cats.effect.unsafe.implicits._ -import ch.epfl.bluebrain.nexus.storage.File.{Digest, FileAttributes} -import ch.epfl.bluebrain.nexus.storage._ -import ch.epfl.bluebrain.nexus.storage.attributes.AttributesCacheActor.Protocol._ -import ch.epfl.bluebrain.nexus.storage.config.AppConfig.DigestConfig - -import java.nio.file.Path -import java.time.Clock -import scala.collection.mutable -import scala.concurrent.duration._ - -/** - * Actor that stores a map with the attributes (value) for each path (key). The map also contains timestamps of the - * attributes being computed. The attributes computation is executed using a SourceQueue with parallelism defined by - * the ''concurrentComputations'' configuration flag. Once computed, a new message is sent back to the actor with the - * attributes to be stored in the map. - * - * @param computation - * the storage computation - * @tparam F - * the effect type - * @tparam S - * the source of the storage computation - */ -class AttributesCacheActor[S](computation: AttributesComputation[S])(implicit - config: DigestConfig, - clock: Clock -) extends Actor - with ActorLogging { - - implicit private val as: ActorSystem = context.system - - import context.dispatcher - private val map = mutable.LinkedHashMap.empty[String, Either[Long, FileAttributes]] - private val selfRef = self - - private val attributesComputation: Flow[Compute, Option[Put], NotUsed] = - Flow[Compute].mapAsyncUnordered(config.concurrentComputations) { case Compute(filePath) => - log.debug("Computing attributes for file '{}'.", filePath) - computation(filePath, config.algorithm) - .map(attributes => Option(Put(filePath, attributes))) - .recover(logAndSkip(filePath)) - .unsafeToFuture() - } - - private val sendMessage = Sink.foreach[Put](putMsg => selfRef ! putMsg) - - private val queue = - Source - .queue[Compute](config.maxInQueue, OverflowStrategy.dropHead) - .via(attributesComputation) - .collect { case Some(putMsg) => putMsg } - .toMat(sendMessage)(Keep.left) - .run() - - private def logAndSkip(filePath: Path): PartialFunction[Throwable, Option[Put]] = { case e => - log.error(e, "Attributes computation for file '{}' failed", filePath) - None - } - - override def receive: Receive = { - case Get(filePath) => - map.get(filePath.toString) match { - case Some(Right(attributes)) => - log.debug("Attributes for file '{}' fetched from the cache.", filePath) - sender() ! attributes - - case Some(Left(time)) if !needsReTrigger(time) => - log.debug( - "Attributes for file '{}' is being computed. Computation started {} ms ago.", - filePath, - now() - time - ) - sender() ! 
emptyAttributes(filePath) - - case Some(Left(_)) => - log.warning( - "Attributes for file '{}' is being computed but the elapsed time of '{}' expired.", - filePath, - config.retriggerAfter - ) - sender() ! emptyAttributes(filePath) - self ! Compute(filePath) - - case _ => - log.debug("Attributes for file '{}' not found in the cache.", filePath) - sender() ! emptyAttributes(filePath) - self ! Compute(filePath) - } - - case Put(filePath, attributes) => - map += filePath.toString -> Right(attributes) - - val diff = Math.max((map.size - config.maxInMemory).toInt, 0) - removeOldest(diff) - log.debug("Add computed attributes '{}' for file '{}' to the cache.", attributes, filePath) - - case compute @ Compute(filePath) => - if (map.contains(filePath.toString)) - log.debug("Attributes for file '{}' already computed. Do nothing.", filePath) - else { - map += filePath.toString -> Left(now()) - val _ = queue.offer(compute).map(logQueue(compute, _)) - } - - case msg => - log.error("Received a message '{}' incompatible with the expected", msg) - - } - - private def emptyAttributes(path: Path) = - FileAttributes(path.toAkkaUri, 0L, Digest.empty, `application/octet-stream`) - - private def removeOldest(n: Int) = - map --= map.take(n).keySet - - private def now(): Long = clock.instant().toEpochMilli - - private def needsReTrigger(time: Long): Boolean = { - val elapsed: FiniteDuration = (now() - time).millis - - elapsed > config.retriggerAfter - } - - private def logQueue(compute: Compute, result: QueueOfferResult): Unit = - result match { - case QueueOfferResult.Dropped => - log.error("The computation for the file '{}' was dropped from the queue.", compute.filePath) - case QueueOfferResult.Failure(ex) => - log.error(ex, "The computation for the file '{}' failed to be enqueued.", compute.filePath) - case _ => () - } - -} - -object AttributesCacheActor { - - def props[S]( - computation: AttributesComputation[S] - )(implicit config: DigestConfig, clock: Clock): Props = - Props(new AttributesCacheActor(computation)) - - sealed private[attributes] trait Protocol extends Product with Serializable - private[attributes] object Protocol { - final case class Get(filePath: Path) extends Protocol - final case class Put(filePath: Path, attributes: FileAttributes) extends Protocol - final case class Compute(filePath: Path) extends Protocol - } - -} diff --git a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/attributes/AttributesComputation.scala b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/attributes/AttributesComputation.scala deleted file mode 100644 index 5e13957ec9..0000000000 --- a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/attributes/AttributesComputation.scala +++ /dev/null @@ -1,80 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage.attributes - -import akka.stream.Materializer -import akka.stream.scaladsl.{Keep, Sink} -import akka.util.ByteString -import cats.effect.IO -import cats.implicits._ -import ch.epfl.bluebrain.nexus.storage.File.{Digest, FileAttributes} -import ch.epfl.bluebrain.nexus.storage.StorageError.InternalError -import ch.epfl.bluebrain.nexus.storage._ - -import java.nio.file.{Files, Path} -import java.security.MessageDigest -import scala.concurrent.{ExecutionContext, Future} -import scala.util.{Failure, Success, Try} - -trait AttributesComputation[Source] { - - /** - * Given a path and an algorithm, generates its FileAttributes - * - * @param path - * the path to the file - * @param algorithm - * the digest algorithm - * @return - * a computed file attributes, 
wrapped in the effect type F - */ - def apply(path: Path, algorithm: String): IO[FileAttributes] -} - -object AttributesComputation { - - private def sinkSize: Sink[ByteString, Future[Long]] = Sink.fold(0L)(_ + _.size) - - def sinkDigest(msgDigest: MessageDigest)(implicit ec: ExecutionContext): Sink[ByteString, Future[Digest]] = - Sink - .fold(msgDigest) { (digest, currentBytes: ByteString) => - digest.update(currentBytes.asByteBuffer) - digest - } - .mapMaterializedValue(_.map(dig => Digest(dig.getAlgorithm, dig.digest().map("%02x".format(_)).mkString))) - - /** - * A computation of attributes for a source of type AkkaSource - * - * @tparam F - * the effect type - * @return - * an AttributesComputation implemented for a source of type AkkaSource - */ - implicit def akkaAttributes(implicit - contentTypeDetector: ContentTypeDetector, - ec: ExecutionContext, - mt: Materializer - ): AttributesComputation[AkkaSource] = - (path: Path, algorithm: String) => { - if (!Files.exists(path)) IO.raiseError(InternalError(s"Path not found '$path'")) - else - Try(MessageDigest.getInstance(algorithm)) match { - case Success(msgDigest) => - val isDir = Files.isDirectory(path) - val source = if (isDir) folderSource(path) else fileSource(path) - IO.fromFuture( - IO.delay( - source - .alsoToMat(sinkSize)(Keep.right) - .toMat(sinkDigest(msgDigest)) { (bytesF, digestF) => - (bytesF, digestF).mapN { case (bytes, digest) => - FileAttributes(path.toAkkaUri, bytes, digest, contentTypeDetector(path, isDir)) - } - } - .run() - ) - ) - case Failure(_) => IO.raiseError(InternalError(s"Invalid algorithm '$algorithm'.")) - } - - } -} diff --git a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/attributes/ContentTypeDetector.scala b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/attributes/ContentTypeDetector.scala deleted file mode 100644 index cc6050fb83..0000000000 --- a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/attributes/ContentTypeDetector.scala +++ /dev/null @@ -1,45 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage.attributes - -import akka.http.scaladsl.model.{ContentType, MediaType, MediaTypes} -import akka.http.scaladsl.model.HttpCharsets.`UTF-8` -import akka.http.scaladsl.model.MediaTypes.{`application/octet-stream`, `application/x-tar`} -import ch.epfl.bluebrain.nexus.delta.kernel.http.MediaTypeDetectorConfig -import ch.epfl.bluebrain.nexus.delta.kernel.utils.FileUtils - -import java.nio.file.{Files, Path} -import scala.util.Try - -final class ContentTypeDetector(config: MediaTypeDetectorConfig) { - - /** - * Detects the media type of the provided path, based on the custom detector, the file system detector available for - * a certain path or on the path extension.
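// ---- [Editorial sketch, not part of the deleted sources] ----
// The implicit `akkaAttributes` above streams a file once through two attached sinks
// (size fold and digest fold) and zips their materialized values into `FileAttributes`.
// A hypothetical call site (path and algorithm are example values):
import java.nio.file.Paths
import cats.effect.IO
import ch.epfl.bluebrain.nexus.storage.AkkaSource
import ch.epfl.bluebrain.nexus.storage.File.FileAttributes
import ch.epfl.bluebrain.nexus.storage.attributes.AttributesComputation

def sha256Attributes(implicit comp: AttributesComputation[AkkaSource]): IO[FileAttributes] =
  comp(Paths.get("/nexus/my-bucket/nexus/dir/file.txt"), "SHA-256")
// ---- end of editorial sketch ----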
If the path is a directory, an application/x-tar content-type is returned - * - * @param path - * the path - * @param isDir - * flag to decide whether or not the path is a directory - */ - def apply(path: Path, isDir: Boolean = false): ContentType = - if (isDir) { - `application/x-tar` - } else { - val extension = FileUtils.extension(path.toFile.getName) - - val customDetector = extension.flatMap(config.find) - - def fileContentDetector = - for { - probed <- Try(Files.probeContentType(path)).toOption - rawContentType <- Option.when(probed != null && probed.nonEmpty)(probed) - parsedContentType <- MediaType.parse(rawContentType).toOption - } yield parsedContentType - - def defaultAkkaDetector = extension.flatMap { e => Try(MediaTypes.forExtension(e)).toOption } - - val mediaType = - customDetector.orElse(fileContentDetector).orElse(defaultAkkaDetector).getOrElse(`application/octet-stream`) - ContentType(mediaType, () => `UTF-8`) - } - -} diff --git a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/auth/AuthorizationError.scala b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/auth/AuthorizationError.scala deleted file mode 100644 index da68c9696a..0000000000 --- a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/auth/AuthorizationError.scala +++ /dev/null @@ -1,20 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage.auth - -import ch.epfl.bluebrain.nexus.delta.kernel.jwt.TokenRejection - -sealed abstract class AuthorizationError(message: String) extends Exception(message) with Product with Serializable { - override def fillInStackTrace(): AuthorizationError = this -} - -object AuthorizationError { - - final case object NoToken extends AuthorizationError("No token has been provided.") - final case class InvalidToken(tokenRejection: TokenRejection) extends AuthorizationError(tokenRejection.getMessage) - final case class UnauthorizedUser(issuer: String, subject: String) - extends AuthorizationError( - s"User '$subject' from realm '$issuer' wrongfully attempted to perform a call to this service."
- ) - final case class TokenNotVerified(tokenRejection: TokenRejection) - extends AuthorizationError(tokenRejection.getMessage) - -} diff --git a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/auth/AuthorizationMethod.scala b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/auth/AuthorizationMethod.scala deleted file mode 100644 index 929d370a9f..0000000000 --- a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/auth/AuthorizationMethod.scala +++ /dev/null @@ -1,67 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage.auth - -import cats.data.{NonEmptyList, NonEmptySet} -import cats.syntax.all._ -import ch.epfl.bluebrain.nexus.delta.kernel.jwt.{AuthToken, ParsedToken} -import ch.epfl.bluebrain.nexus.storage.auth.AuthorizationError._ -import com.nimbusds.jose.jwk.{JWK, JWKSet} -import pureconfig.ConfigReader -import pureconfig.generic.semiauto.deriveReader -import pureconfig.module.cats._ - -import java.util -import scala.jdk.CollectionConverters._ - -/** - * Authorization config - */ -sealed trait AuthorizationMethod { - - /** - * Validates the incoming token - */ - def validate(token: Option[AuthToken]): Either[AuthorizationError, Unit] -} - -object AuthorizationMethod { - - /** - * No token/authorization is needed when performing calls - */ - final case object Anonymous extends AuthorizationMethod { - override def validate(token: Option[AuthToken]): Either[AuthorizationError, Unit] = Right(()) - } - - /** - * A token matching this realm and username is required and can be validated to the provided audiences and set of - * JSON Web Keys - */ - final case class VerifyToken(issuer: String, subject: String, audiences: Option[NonEmptySet[String]], keys: JWKSet) - extends AuthorizationMethod { - override def validate(token: Option[AuthToken]): Either[AuthorizationError, Unit] = { - for { - token <- token.toRight(NoToken) - parsedToken <- ParsedToken.fromToken(token).leftMap(InvalidToken) - _ <- Either.cond( - issuer == parsedToken.issuer && subject == parsedToken.subject, - (), - UnauthorizedUser(parsedToken.issuer, parsedToken.subject) - ) - _ <- parsedToken.validate(audiences, keys).leftMap(TokenNotVerified) - } yield () - } - } - - implicit val authorizationMethodConfigReader: ConfigReader[AuthorizationMethod] = { - implicit val jsonObjectReader: ConfigReader[util.Map[String, AnyRef]] = - ConfigReader.configObjectConfigReader.map(configObj => configObj.unwrapped()) - implicit val jwkSetReader: ConfigReader[JWKSet] = ConfigReader[NonEmptyList[util.Map[String, AnyRef]]].map { - jwkKeys => new JWKSet(jwkKeys.map(key => JWK.parse(key)).toList.asJava) - } - implicit val anonymousReader = deriveReader[Anonymous.type] - implicit val verifyToken: ConfigReader[VerifyToken] = deriveReader[VerifyToken] - - deriveReader[AuthorizationMethod] - } - -} diff --git a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/config/AppConfig.scala b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/config/AppConfig.scala deleted file mode 100644 index 7c40524cb6..0000000000 --- a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/config/AppConfig.scala +++ /dev/null @@ -1,136 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage.config - -import akka.http.scaladsl.model.Uri -import ch.epfl.bluebrain.nexus.delta.kernel.http.MediaTypeDetectorConfig -import ch.epfl.bluebrain.nexus.storage.JsonLdCirceSupport.OrderedKeys -import ch.epfl.bluebrain.nexus.storage.auth.AuthorizationMethod -import ch.epfl.bluebrain.nexus.storage.config.AppConfig._ - -import java.nio.file.Path -import 
scala.concurrent.duration.FiniteDuration - -/** - * Application configuration - * - * @param description - * service description - * @param http - * http interface configuration - * @param storage - * storages configuration - * @param authorization - * authorization configuration - * @param mediaTypeDetector - * media type configuration - * @param digest - * the digest configuration - */ -final case class AppConfig( - description: Description, - http: HttpConfig, - storage: StorageConfig, - authorization: AuthorizationMethod, - mediaTypeDetector: MediaTypeDetectorConfig, - digest: DigestConfig -) - -object AppConfig { - - /** - * Service description - * - * @param name - * service name - */ - final case class Description(name: String) { - - /** - * @return - * the version of the service - */ - val version: String = BuildInfo.version - - /** - * @return - * the full name of the service (name + version) - */ - val fullName: String = s"$name-${version.replaceAll("\\W", "-")}" - - } - - /** - * HTTP configuration - * - * @param interface - * interface to bind to - * @param port - * port to bind to - * @param prefix - * prefix to add to HTTP routes - * @param publicUri - * public URI of the service - */ - final case class HttpConfig(interface: String, port: Int, prefix: String, publicUri: Uri) - - /** - * Storages configuration - * - * @param rootVolume - * the base [[Path]] where the files are stored - * @param protectedDirectory - * the relative [[Path]] of the protected directory once the storage bucket is selected - * @param fixerEnabled - * call the permissions fixer when linking files, before they are moved - * @param fixerCommand - * the command and arguments to call the 'nexus-fixer' binary, e.g. List("sudo", "nexus-fixer") - */ - final case class StorageConfig( - rootVolume: Path, - extraPrefixes: List[Path], - protectedDirectory: Path, - fixerEnabled: Boolean, - fixerCommand: Vector[String], - linkWithAtomicMove: Option[Boolean] - ) - - /** - * The digest configuration. 
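// ---- [Editorial sketch, not part of the deleted sources] ----
// The `authorization` field documented above is one of the two `AuthorizationMethod`
// variants: `Anonymous` accepts every call, while `VerifyToken` checks issuer/subject and
// verifies the token against the configured JWK set. A minimal validation helper:
import ch.epfl.bluebrain.nexus.delta.kernel.jwt.AuthToken
import ch.epfl.bluebrain.nexus.storage.auth.{AuthorizationError, AuthorizationMethod}

def authorize(method: AuthorizationMethod, token: Option[AuthToken]): Either[AuthorizationError, Unit] =
  method.validate(token) // Anonymous => Right(()); VerifyToken => issuer, subject and signature checks
// ---- end of editorial sketch ----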
- * - * @param algorithm - * the digest algorithm - * @param maxInMemory - * the maximum number of attributes stored in memory - * @param concurrentComputations - * the maximum number of concurrent computations of digest - * @param maxInQueue - * the maximum number of computations in queue to be computed - * @param retriggerAfter - * the amount of time after which a digest that is still in the queue to be computed can be retriggered - */ - final case class DigestConfig( - algorithm: String, - maxInMemory: Long, - concurrentComputations: Int, - maxInQueue: Int, - retriggerAfter: FiniteDuration - ) - - implicit def toStorage(implicit config: AppConfig): StorageConfig = config.storage - implicit def toHttp(implicit config: AppConfig): HttpConfig = config.http - implicit def toDigest(implicit config: AppConfig): DigestConfig = config.digest - - val orderedKeys: OrderedKeys = OrderedKeys( - List( - "@context", - "@id", - "@type", - "reason", - "message", - "details", - "filename", - "location", - "bytes", - "" - ) - ) -} diff --git a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/config/Contexts.scala b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/config/Contexts.scala deleted file mode 100644 index 1f6f1ce6dc..0000000000 --- a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/config/Contexts.scala +++ /dev/null @@ -1,12 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage.config - -import akka.http.scaladsl.model.Uri - -object Contexts { - - private val base = "https://bluebrain.github.io/nexus/contexts/" - - val errorCtxIri: Uri = s"${base}error.json" - val resourceCtxIri: Uri = s"${base}resource.json" - -} diff --git a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/config/Settings.scala b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/config/Settings.scala deleted file mode 100644 index 94ed2c8b49..0000000000 --- a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/config/Settings.scala +++ /dev/null @@ -1,35 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage.config - -import akka.actor.{ExtendedActorSystem, Extension, ExtensionId, ExtensionIdProvider} -import akka.http.scaladsl.model.Uri -import com.typesafe.config.Config -import pureconfig.ConvertHelpers._ -import pureconfig._ -import pureconfig.generic.auto._ - -/** - * Akka settings extension to expose application configuration. It typically uses the configuration instance of the - * actor system as the configuration root.
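// ---- [Editorial sketch, not part of the deleted sources] ----
// The `Settings` extension above reads the "app" section of the actor-system configuration
// via pureconfig; the typical lookup is one line (system name is an example value):
import akka.actor.ActorSystem
import ch.epfl.bluebrain.nexus.storage.config.{AppConfig, Settings}

val system: ActorSystem  = ActorSystem("storage")
val appConfig: AppConfig = Settings(system).appConfig
// ---- end of editorial sketch ----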
- * - * @param config - * the configuration instance to read - */ -@SuppressWarnings(Array("LooksLikeInterpolatedString", "OptionGet")) -class Settings(config: Config) extends Extension { - - val appConfig: AppConfig = { - implicit val uriConverter: ConfigConvert[Uri] = - ConfigConvert.viaString[Uri](catchReadError(s => Uri(s)), _.toString) - ConfigSource.fromConfig(config).at("app").loadOrThrow[AppConfig] - } - -} - -object Settings extends ExtensionId[Settings] with ExtensionIdProvider { - - override def lookup: ExtensionId[_ <: Extension] = Settings - - override def createExtension(system: ExtendedActorSystem): Settings = apply(system.settings.config) - - def apply(config: Config): Settings = new Settings(config) -} diff --git a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/files/CopyFileOutput.scala b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/files/CopyFileOutput.scala deleted file mode 100644 index 55cf5d3abf..0000000000 --- a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/files/CopyFileOutput.scala +++ /dev/null @@ -1,19 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage.files - -import akka.http.scaladsl.model.Uri -import ch.epfl.bluebrain.nexus.storage.{encJPath, encUriPath} -import io.circe.Encoder -import io.circe.generic.semiauto.deriveEncoder - -import java.nio.file.Path - -final case class CopyFileOutput( - sourcePath: Uri.Path, - destinationPath: Uri.Path, - absoluteSourceLocation: Path, - absoluteDestinationLocation: Path -) - -object CopyFileOutput { - implicit val enc: Encoder[CopyFileOutput] = deriveEncoder -} diff --git a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/files/ValidateFile.scala b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/files/ValidateFile.scala deleted file mode 100644 index 312a292bf1..0000000000 --- a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/files/ValidateFile.scala +++ /dev/null @@ -1,175 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage.files - -import akka.http.scaladsl.model.Uri -import akka.stream.Materializer -import akka.stream.alpakka.file.scaladsl.Directory -import akka.stream.scaladsl.Sink -import cats.data.EitherT -import cats.effect.IO -import cats.effect.unsafe.implicits._ -import cats.implicits._ -import ch.epfl.bluebrain.nexus.storage.Rejection.{PathAlreadyExists, PathContainsLinks, PathNotFound} -import ch.epfl.bluebrain.nexus.storage.StorageError.PathInvalid -import ch.epfl.bluebrain.nexus.storage.config.AppConfig.StorageConfig -import ch.epfl.bluebrain.nexus.storage.{basePath, descendantOf, filePath, RejOr, Rejection, StorageError} - -import java.nio.file.{Files, Path} - -trait ValidateFile { - def forCreate(name: String, destPath: Uri.Path): IO[ValidatedCreateFile] - - def forMoveIntoProtectedDir( - name: String, - sourcePath: Uri.Path, - destPath: Uri.Path - ): IO[RejOr[ValidatedMoveFile]] - - def forCopyWithinProtectedDir( - sourceBucket: String, - destBucket: String, - sourcePath: Uri.Path, - destPath: Uri.Path - ): IO[RejOr[ValidatedCopyFile]] -} - -sealed abstract case class ValidatedCreateFile(absDestPath: Path) -sealed abstract case class ValidatedMoveFile(name: String, absSourcePath: Path, absDestPath: Path, isDir: Boolean) -sealed abstract case class ValidatedCopyFile( - sourceBucket: String, - destBucket: String, - absSourcePath: Path, - absDestPath: Path -) - -object ValidateFile { - - def mk(config: StorageConfig)(implicit mt: Materializer): ValidateFile = - new ValidateFile { - - override def forCreate(name: String, destPath: Uri.Path): IO[ValidatedCreateFile] 
= { - val absDestPath = filePath(config, name, destPath) - throwIfIO(!descendantOf(absDestPath, basePath(config, name)), PathInvalid(name, destPath)) - .as(new ValidatedCreateFile(absDestPath) {}) - } - - override def forMoveIntoProtectedDir( - name: String, - sourcePath: Uri.Path, - destPath: Uri.Path - ): IO[RejOr[ValidatedMoveFile]] = { - - val bucketPath = basePath(config, name, protectedDir = false) - val bucketProtectedPath = basePath(config, name) - val absSourcePath = filePath(config, name, sourcePath, protectedDir = false) - val absDestPath = filePath(config, name, destPath) - - def notFound = PathNotFound(name, sourcePath) - - (for { - _ <- rejectIf(fileExists(absSourcePath).map(!_), notFound) - _ <- rejectIf(descendantOf(absSourcePath, bucketProtectedPath).pure[IO], notFound) - _ <- throwIf(!allowedPrefix(config, bucketPath, absSourcePath), PathInvalid(name, sourcePath)) - _ <- throwIf(!descendantOf(absDestPath, bucketProtectedPath), PathInvalid(name, destPath)) - _ <- rejectIf(fileExists(absDestPath), PathAlreadyExists(name, destPath)) - _ <- rejectIfFileIsSymbolicLinkOrContainsHardLink(name, sourcePath, absSourcePath) - isDir <- checkIfSourceIsFileOrDir(name, sourcePath, absSourcePath) - } yield new ValidatedMoveFile(name, absSourcePath, absDestPath, isDir) {}).value - } - - override def forCopyWithinProtectedDir( - sourceBucket: String, - destBucket: String, - sourcePath: Uri.Path, - destPath: Uri.Path - ): IO[RejOr[ValidatedCopyFile]] = { - - val sourceBucketProtectedPath = basePath(config, sourceBucket) - val destBucketProtectedPath = basePath(config, destBucket) - val absSourcePath = filePath(config, sourceBucket, sourcePath) - val absDestPath = filePath(config, destBucket, destPath) - - def notFound = PathNotFound(destBucket, sourcePath) - - (for { - _ <- rejectIf(fileExists(absSourcePath).map(!_), notFound) - _ <- rejectIf((!descendantOf(absSourcePath, sourceBucketProtectedPath)).pure[IO], notFound) - _ <- throwIf(!descendantOf(absDestPath, destBucketProtectedPath), PathInvalid(destBucket, destPath)) - _ <- rejectIf(fileExists(absDestPath), PathAlreadyExists(destBucket, destPath)) - isFile <- EitherT.right[Rejection](isRegularFile(absSourcePath)) - _ <- throwIf(!isFile, PathInvalid(sourceBucket, sourcePath)) - } yield new ValidatedCopyFile(sourceBucket, destBucket, absSourcePath, absDestPath) {}).value - } - - def fileExists(absSourcePath: Path): IO[Boolean] = IO.blocking(Files.exists(absSourcePath)) - def isRegularFile(absSourcePath: Path): IO[Boolean] = IO.blocking(Files.isRegularFile(absSourcePath)) - def isDirectory(absSourcePath: Path): IO[Boolean] = IO.blocking(Files.isDirectory(absSourcePath)) - def isSymbolicLink(absSourcePath: Path): IO[Boolean] = IO.blocking(Files.isSymbolicLink(absSourcePath)) - - def allowedPrefix(config: StorageConfig, bucketPath: Path, absSourcePath: Path) = - absSourcePath.startsWith(bucketPath) || - config.extraPrefixes.exists(absSourcePath.startsWith) - - def containsHardLink(absPath: Path): IO[Boolean] = - IO.blocking(Files.isDirectory(absPath)).flatMap { - case true => false.pure[IO] - case false => - IO.blocking(Files.getAttribute(absPath, "unix:nlink").asInstanceOf[Int]).map(_ > 1) - } - - def checkIfSourceIsFileOrDir( - name: String, - sourcePath: Uri.Path, - absSourcePath: Path - ): EitherT[IO, Rejection, Boolean] = - EitherT - .right[Rejection](isRegularFile(absSourcePath)) - .ifM( - ifTrue = EitherT.pure(false), - ifFalse = checkIfSourceIsValidDirectory(name, sourcePath, absSourcePath) - ) - - def checkIfSourceIsValidDirectory( - 
name: String, - sourcePath: Uri.Path, - absSourcePath: Path - ): EitherT[IO, Rejection, Boolean] = - EitherT - .right[Rejection](isDirectory(absSourcePath)) - .ifM( - ifTrue = rejectIfDirContainsLink(name, sourcePath, absSourcePath).as(true), - ifFalse = EitherT.leftT(PathNotFound(name, sourcePath)) - ) - - def rejectIfFileIsSymbolicLinkOrContainsHardLink( - name: String, - sourcePath: Uri.Path, - absSourcePath: Path - ): EitherT[IO, Rejection, Unit] = - rejectIf( - (isSymbolicLink(absSourcePath), containsHardLink(absSourcePath)).mapN(_ || _), - PathContainsLinks(name, sourcePath) - ) - - def dirContainsLink(path: Path): IO[Boolean] = - IO.fromFuture { - IO.delay { - Directory - .walk(path) - .mapAsync(1)(p => (isSymbolicLink(p), containsHardLink(p)).mapN(_ || _).unsafeToFuture()) - .takeWhile(_ == false, inclusive = true) - .runWith(Sink.last) - } - } - - def rejectIfDirContainsLink(name: String, sourcePath: Uri.Path, path: Path): EitherT[IO, Rejection, Unit] = - rejectIf(dirContainsLink(path), PathContainsLinks(name, sourcePath)) - - def rejectIf(cond: IO[Boolean], rej: Rejection): EitherT[IO, Rejection, Unit] = EitherT( - cond.ifF(Left(rej), Right(())) - ) - - def throwIf(cond: Boolean, e: StorageError): EitherT[IO, Rejection, Unit] = EitherT.right(throwIfIO(cond, e)) - - def throwIfIO(cond: Boolean, e: StorageError): IO[Unit] = IO.raiseWhen(cond)(e) - } -} diff --git a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/jsonld/JsonLdContext.scala b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/jsonld/JsonLdContext.scala deleted file mode 100644 index 0d4f369016..0000000000 --- a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/jsonld/JsonLdContext.scala +++ /dev/null @@ -1,37 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage.jsonld - -import akka.http.scaladsl.model.Uri -import io.circe.Json - -object JsonLdContext { - - object keywords { - val context = "@context" - } - - /** - * Adds a context Iri to an existing @context, or creates an @context with the Iri as a value. 
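// ---- [Editorial sketch, not part of the deleted sources] ----
// Expected behavior of `addContext`, defined just below, on the common shapes of "@context"
// (inputs are example JSON; the merged output is paraphrased in the trailing comments):
import akka.http.scaladsl.model.Uri
import ch.epfl.bluebrain.nexus.storage.jsonld.JsonLdContext.addContext
import io.circe.Json
import io.circe.syntax._

val ctx = Uri("https://bluebrain.github.io/nexus/contexts/error.json")
addContext(Json.obj("k" := "v"), ctx)
// => {"k": "v", "@context": "https://.../error.json"} (no @context yet: the IRI is set as a string)
addContext(Json.obj("@context" := "https://example.com/other"), ctx) // hypothetical existing IRI
// => "@context" becomes the array ["https://example.com/other", "https://.../error.json"]
// ---- end of editorial sketch ----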
- */ - def addContext(json: Json, contextIri: Uri): Json = { - val jUriString = Json.fromString(contextIri.toString) - - json.mapObject { obj => - obj(keywords.context) match { - case None => obj.add(keywords.context, jUriString) - case Some(ctxValue) => - (ctxValue.asObject, ctxValue.asArray, ctxValue.asString) match { - case (Some(co), _, _) if co.isEmpty => obj.add(keywords.context, jUriString) - case (_, Some(ca), _) if ca.isEmpty => obj.add(keywords.context, jUriString) - case (_, _, Some(cs)) if cs.isEmpty => obj.add(keywords.context, jUriString) - case (Some(co), _, _) if !co.values.exists(_ == jUriString) => - obj.add(keywords.context, Json.arr(ctxValue, jUriString)) - case (_, Some(ca), _) if !ca.contains(jUriString) => - obj.add(keywords.context, Json.fromValues(ca :+ jUriString)) - case (_, _, Some(cs)) if cs != contextIri.toString => - obj.add(keywords.context, Json.arr(ctxValue, jUriString)) - case _ => obj - } - } - } - } -} diff --git a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/package.scala b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/package.scala deleted file mode 100644 index 85b748943d..0000000000 --- a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/package.scala +++ /dev/null @@ -1,99 +0,0 @@ -package ch.epfl.bluebrain.nexus - -import akka.http.scaladsl.model.Uri -import akka.http.scaladsl.model.Uri.Path -import akka.stream.alpakka.file.scaladsl.Directory -import akka.stream.scaladsl.{FileIO, Source} -import akka.util.ByteString -import ch.epfl.bluebrain.nexus.storage.File.FileAttributes -import ch.epfl.bluebrain.nexus.storage.config.AppConfig.StorageConfig -import ch.epfl.bluebrain.nexus.storage.config.Contexts.errorCtxIri -import io.circe.syntax._ -import io.circe.{Decoder, Encoder, Json} - -import java.net.URLDecoder -import java.nio.file.{Path => JPath, Paths} -import scala.annotation.tailrec -import scala.util.Try -import fs2.io.file.{Path => Fs2Path} - -package object storage { - - /** - * Source where the Output is ByteString and the Materialization is Any - */ - type AkkaSource = Source[ByteString, Any] - - /** - * Rejection or file attributes - */ - type RejOrAttributes = Either[Rejection, FileAttributes] - - /** - * Rejection or Path wrapped - */ - type RejOrPath = Either[Rejection, JPath] - - /** - * Rejection or Out attributes - */ - type RejOr[Out] = Either[Rejection, Out] - - implicit val encFs2Path: Encoder[Fs2Path] = Encoder[String].contramap[Fs2Path](_.toString) - implicit val encJPath: Encoder[JPath] = Encoder[String].contramap[JPath](_.toString) - - implicit val encUriPath: Encoder[Path] = Encoder.encodeString.contramap(_.toString()) - implicit val decUriPath: Decoder[Path] = Decoder.decodeString.emapTry(s => Try(Path(s))) - - implicit class PathSyntax(private val path: JPath) extends AnyVal { - - /** - * Converts a Java Path to an Akka [[Uri]] - */ - def toAkkaUri: Uri = { - val pathString = path.toUri.toString - if (pathString.endsWith("/")) Uri(pathString.dropRight(1)) else Uri(pathString) - } - } - - /** - * Build a Json error message that contains the keys @context and @type - */ - def jsonError(json: Json): Json = { - val typed = json.hcursor.get[String]("@type").map(v => Json.obj("@type" -> v.asJson)).getOrElse(Json.obj()) - typed deepMerge Json.obj("@context" -> Json.fromString(errorCtxIri.toString)) - } - - def folderSource(path: JPath): AkkaSource = Directory.walk(path).via(TarFlow.writer(path)) - - def fileSource(path: JPath): AkkaSource = FileIO.fromPath(path) - - /** - * Checks if the ''target'' path is a 
descendant of the ''parent'' path. E.g.: path = /some/my/path ; parent = /some - * will return true E.g.: path = /some/my/path ; parent = /other will return false - */ - def descendantOf(target: JPath, parent: JPath): Boolean = - inner(parent, target.getParent) - - @tailrec - @SuppressWarnings(Array("NullParameter")) - def inner(parent: JPath, child: JPath): Boolean = { - if (child == null) false - else if (parent == child) true - else inner(parent, child.getParent) - } - - private def decode(path: Uri.Path): String = - Try(URLDecoder.decode(path.toString, "UTF-8")).getOrElse(path.toString()) - - def basePath(config: StorageConfig, name: String, protectedDir: Boolean = true): JPath = { - val path = config.rootVolume.resolve(name).normalize() - if (protectedDir) path.resolve(config.protectedDirectory).normalize() else path - } - - def filePath(config: StorageConfig, name: String, path: Uri.Path, protectedDir: Boolean = true): JPath = { - val filePath = Paths.get(decode(path)) - if (filePath.isAbsolute) filePath.normalize() - else basePath(config, name, protectedDir).resolve(filePath).normalize() - } -} diff --git a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/AppInfoRoutes.scala b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/AppInfoRoutes.scala deleted file mode 100644 index 05219c5a26..0000000000 --- a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/AppInfoRoutes.scala +++ /dev/null @@ -1,50 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage.routes - -import akka.http.scaladsl.model.StatusCodes._ -import akka.http.scaladsl.server.Directives._ -import akka.http.scaladsl.server.Route -import ch.epfl.bluebrain.nexus.storage.config.AppConfig.Description -import ch.epfl.bluebrain.nexus.storage.routes.AppInfoRoutes.ServiceDescription -import ch.epfl.bluebrain.nexus.storage.routes.instances._ -import io.circe.generic.auto._ -import kamon.instrumentation.akka.http.TracingDirectives.operationName - -/** - * Akka HTTP route definition for service description - */ -class AppInfoRoutes(serviceDescription: ServiceDescription) { - - def routes: Route = - concat( - (get & pathEndOrSingleSlash) { - operationName("/") { - complete(OK -> serviceDescription) - } - } - ) -} - -object AppInfoRoutes { - - /** - * A service description. - * - * @param name - * the name of the service - * @param version - * the current version of the service - */ - final case class ServiceDescription(name: String, version: String) - - /** - * Default factory method for building [[AppInfoRoutes]] instances. 
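// ---- [Editorial sketch, not part of the deleted sources] ----
// The path helpers above anchor every request inside a bucket; `descendantOf` walks up the
// parent chain, matching the E.g. lines in its scaladoc:
import java.nio.file.Paths
import ch.epfl.bluebrain.nexus.storage.descendantOf

descendantOf(Paths.get("/some/my/path"), Paths.get("/some"))  // true
descendantOf(Paths.get("/some/my/path"), Paths.get("/other")) // false
// filePath(config, "bucket", path) additionally normalizes and resolves the relative path
// under <rootVolume>/<bucket>/<protectedDirectory>, so traversal escapes fail this check.
// ---- end of editorial sketch ----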
- * - * @param config - * the description service configuration - * @return - * a new [[AppInfoRoutes]] instance - */ - def apply(config: Description): AppInfoRoutes = - new AppInfoRoutes(ServiceDescription(config.name, config.version)) - -} diff --git a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/AuthDirectives.scala b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/AuthDirectives.scala deleted file mode 100644 index 57d05082e3..0000000000 --- a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/AuthDirectives.scala +++ /dev/null @@ -1,35 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage.routes - -import akka.http.scaladsl.model.headers.OAuth2BearerToken -import akka.http.scaladsl.server.Directive0 -import akka.http.scaladsl.server.Directives._ -import cats.effect.unsafe.implicits._ -import ch.epfl.bluebrain.nexus.delta.kernel.Logger -import ch.epfl.bluebrain.nexus.delta.kernel.jwt.AuthToken -import ch.epfl.bluebrain.nexus.storage.StorageError._ -import ch.epfl.bluebrain.nexus.storage.auth.AuthorizationMethod - -object AuthDirectives { - - private val logger = Logger[this.type] - - /** - * Extracts the credentials from the HTTP Authorization Header and builds the [[AccessToken]] - */ - def validUser(implicit authorizationMethod: AuthorizationMethod): Directive0 = { - def validate(token: Option[AuthToken]): Directive0 = - authorizationMethod.validate(token) match { - case Left(error) => - onComplete(logger.error(error)("The user could not be validated.").unsafeToFuture()).flatMap { _ => - failWith(AuthenticationFailed) - } - case Right(_) => pass - } - - extractCredentials.flatMap { - case Some(OAuth2BearerToken(value)) => validate(Some(AuthToken(value))) - case Some(_) => failWith(AuthenticationFailed) - case _ => validate(None) - } - } -} diff --git a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/CopyFile.scala b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/CopyFile.scala deleted file mode 100644 index 97dc73994b..0000000000 --- a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/CopyFile.scala +++ /dev/null @@ -1,11 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage.routes - -import akka.http.scaladsl.model.Uri -import ch.epfl.bluebrain.nexus.storage._ -import io.circe.Decoder -import io.circe.generic.semiauto.deriveDecoder - -final case class CopyFile(sourceBucket: String, source: Uri.Path, destination: Uri.Path) -object CopyFile { - implicit val dec: Decoder[CopyFile] = deriveDecoder -} diff --git a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/PrefixDirectives.scala b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/PrefixDirectives.scala deleted file mode 100644 index ec42332dbc..0000000000 --- a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/PrefixDirectives.scala +++ /dev/null @@ -1,40 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage.routes - -import akka.http.scaladsl.model.Uri -import akka.http.scaladsl.model.Uri.Path -import akka.http.scaladsl.server.Directives.rawPathPrefix -import akka.http.scaladsl.server.{Directive0, PathMatcher} - -import scala.annotation.tailrec - -/** - * Collection of custom directives for matching against prefix paths. 
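The validUser directive deleted above interleaves two concerns: pulling an optional bearer token out of the Authorization header and validating it. The extraction half in isolation, with a hypothetical Token case class standing in for the removed AuthToken and a stock Akka rejection where the original failed with its own AuthenticationFailed error:

    import akka.http.scaladsl.model.headers.OAuth2BearerToken
    import akka.http.scaladsl.server.{AuthorizationFailedRejection, Directive1}
    import akka.http.scaladsl.server.Directives._

    final case class Token(value: String) // stand-in for the removed AuthToken

    object BearerDirectives {
      // Some(token) for a Bearer header, None when absent; other schemes are rejected.
      val optionalBearer: Directive1[Option[Token]] =
        extractCredentials.flatMap {
          case Some(OAuth2BearerToken(v)) => provide(Some(Token(v)))
          case Some(_)                    => reject(AuthorizationFailedRejection)
          case None                       => provide(None)
        }
    }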
- */ -trait PrefixDirectives { - - final def stripTrailingSlashes(path: Path): Path = { - @tailrec - def strip(p: Path): Path = - p match { - case Path.Empty => Path.Empty - case Path.Slash(rest) => strip(rest) - case other => other - } - strip(path.reverse).reverse - } - - /** - * Creates a path matcher from the argument ''uri'' by stripping the slashes at the end of its path. The matcher is - * applied directly to the prefix of the unmatched path. - * - * @param uri - * the uri to use as a prefix - */ - final def uriPrefix(uri: Uri): Directive0 = - rawPathPrefix(PathMatcher(stripTrailingSlashes(uri.path), ())) -} - -/** - * Collection of custom directives for matching against prefix paths. - */ -object PrefixDirectives extends PrefixDirectives diff --git a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/RejectionHandling.scala b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/RejectionHandling.scala deleted file mode 100644 index 4a6690679b..0000000000 --- a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/RejectionHandling.scala +++ /dev/null @@ -1,230 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage.routes - -import akka.http.scaladsl.marshalling.{ToEntityMarshaller, ToResponseMarshallable} -import akka.http.scaladsl.model.EntityStreamSizeException -import akka.http.scaladsl.model.StatusCodes._ -import akka.http.scaladsl.model.headers._ -import akka.http.scaladsl.server.AuthenticationFailedRejection.CredentialsMissing -import akka.http.scaladsl.server.Directives.{complete, extractMaterializer, extractRequest} -import akka.http.scaladsl.server._ -import io.circe.generic.semiauto._ -import io.circe.syntax._ -import io.circe.{Encoder, Json} - -import scala.reflect.ClassTag - -/** - * A generic rejection handling that generates json responses and consumes the request. - */ -object RejectionHandling { - // $COVERAGE-OFF$ - - final val errorContextIri = "https://bluebrain.github.io/nexus/contexts/error.json" - - final private case class Error(`@type`: String, reason: String) - private object Error { - implicit final val genericEncoder: Encoder[Error] = - deriveEncoder[Error].mapJson { json => - val context = Json.obj("@context" -> Json.fromString(errorContextIri)) - json deepMerge context - } - } - - /** - * Discards the request entity bytes and completes the request with the argument. - * - * @param m - * a value to be marshalled into an HttpResponse - */ - final def rejectRequestEntityAndComplete(m: => ToResponseMarshallable): Route = { - extractRequest { request => - extractMaterializer { implicit mat => - request.discardEntityBytes() - complete(m) - } - } - } - - /** - * @return - * a rejection handler for the NotFound rejection. - */ - def notFound(implicit J: ToEntityMarshaller[Json]): RejectionHandler = { - implicit val errorMarshaller: ToEntityMarshaller[Error] = J.compose(_.asJson) - RejectionHandler - .newBuilder() - .handleNotFound { - rejectRequestEntityAndComplete(NotFound -> Error("NotFound", "The requested resource could not be found.")) - } - .result() - } - - /** - * A rejection handler for rejections of type ''A'' that uses the provided function ''f'' to complete the request. - * __Note__: the request entity bytes are automatically discarded. 
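The stripTrailingSlashes helper above leans on the fact that Uri.Path is only deconstructed from the front: it reverses the path, drops what are now leading slashes, and reverses back. The same trick stand-alone:

    import akka.http.scaladsl.model.Uri.Path
    import scala.annotation.tailrec

    object SlashStripping {
      def stripTrailingSlashes(path: Path): Path = {
        @tailrec
        def strip(p: Path): Path = p match {
          case Path.Empty       => Path.Empty
          case Path.Slash(rest) => strip(rest) // a leading slash of the reversed path
          case other            => other
        }
        strip(path.reverse).reverse
      }

      def main(args: Array[String]): Unit = {
        println(stripTrailingSlashes(Path("/v1/resources///"))) // /v1/resources
        println(stripTrailingSlashes(Path("///")))              // prints nothing: empty path
      }
    }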
- * - * @see - * [[RejectionHandling.rejectRequestEntityAndComplete()]] - * @param f - * the function to use for handling rejections of type A - */ - def handle[A <: Rejection](f: A => ToResponseMarshallable)(implicit A: ClassTag[A]): RejectionHandler = - RejectionHandler - .newBuilder() - .handle { case A(a) => - rejectRequestEntityAndComplete(f(a)) - } - .result() - - /** - * A rejection handler for all the defined Akka rejections. __Note__: the request entity bytes are automatically - * discarded. - * - * @see - * [[RejectionHandling.rejectRequestEntityAndComplete()]] - */ - def apply(implicit J: ToEntityMarshaller[Json]): RejectionHandler = { - implicit val errorMarshaller: ToEntityMarshaller[Error] = J.compose(_.asJson) - RejectionHandler - .newBuilder() - .handleAll[SchemeRejection] { rejections => - val schemes = rejections.map(_.supported).mkString("'", "', '", "'") - val e = Error("UriSchemeNotAllowed", s"Uri scheme not allowed, supported schemes: $schemes.") - rejectRequestEntityAndComplete(BadRequest -> e) - } - .handleAll[MethodRejection] { rejections => - val (methods, names) = rejections.map(r => r.supported -> r.supported.name).unzip - val namesString = names.mkString("'", "', '", "'") - val e = Error("HttpMethodNotAllowed", s"HTTP method not allowed, supported methods: $namesString.") - rejectRequestEntityAndComplete((MethodNotAllowed, List(Allow(methods)), e)) - } - .handle { case AuthorizationFailedRejection => - val e = Error("AuthorizationFailed", "The supplied authentication is not authorized to access this resource.") - rejectRequestEntityAndComplete(Forbidden -> e) - } - .handle { case MalformedFormFieldRejection(name, msg, _) => - val e = Error("MalformedFormField", s"The form field '$name' was malformed: '$msg'.") - rejectRequestEntityAndComplete(BadRequest -> e) - } - .handle { case MalformedHeaderRejection(name, msg, _) => - val e = Error("MalformedHeader", s"The value of HTTP header '$name' was malformed: '$msg'.") - rejectRequestEntityAndComplete(BadRequest -> e) - } - .handle { case MalformedQueryParamRejection(name, msg, _) => - val e = Error("MalformedQueryParam", s"The query parameter '$name' was malformed: '$msg'.") - rejectRequestEntityAndComplete(BadRequest -> e) - } - .handle { case MalformedRequestContentRejection(msg, throwable) => - val e = Error("MalformedRequestContent", s"The request content was malformed: '$msg'.") - val status = throwable match { - case _: EntityStreamSizeException => PayloadTooLarge - case _ => BadRequest - } - rejectRequestEntityAndComplete(status -> e) - } - .handle { case MissingCookieRejection(cookieName) => - val e = Error("MissingCookie", s"Request is missing required cookie '$cookieName'.") - rejectRequestEntityAndComplete(BadRequest -> e) - } - .handle { case MissingFormFieldRejection(fieldName) => - val e = Error("MissingFormField", s"Request is missing required form field '$fieldName'.") - rejectRequestEntityAndComplete(BadRequest -> e) - } - .handle { case MissingHeaderRejection(headerName) => - val e = Error("MissingHeader", s"Request is missing required HTTP header '$headerName'.") - rejectRequestEntityAndComplete(BadRequest -> e) - } - .handle { case InvalidOriginRejection(allowedOrigins) => - val e = - Error("InvalidOrigin", s"Allowed `Origin` header values: ${allowedOrigins.mkString("'", "', '", "'")}") - rejectRequestEntityAndComplete(Forbidden -> e) - } - .handle { case MissingQueryParamRejection(paramName) => - val e = Error("MissingQueryParam", s"Request is missing required query parameter '$paramName'.") - 
rejectRequestEntityAndComplete(BadRequest -> e) - } - .handle { case InvalidRequiredValueForQueryParamRejection(paramName, requiredValue, _) => - val reason = s"Request is missing required value '$requiredValue' for query parameter '$paramName'." - val e = Error("InvalidRequiredValueForQueryParam", reason) - rejectRequestEntityAndComplete(BadRequest -> e) - } - .handle { case RequestEntityExpectedRejection => - val e = Error("RequestEntityExpected", "Request entity expected but not supplied.") - rejectRequestEntityAndComplete(BadRequest -> e) - } - .handle { case TooManyRangesRejection(_) => - val e = Error("TooManyRanges", "Request contains too many ranges.") - rejectRequestEntityAndComplete(RangeNotSatisfiable -> e) - } - .handle { case CircuitBreakerOpenRejection(_) => - val e = Error("ServiceUnavailable", "The service is unavailable at this time.") - rejectRequestEntityAndComplete(ServiceUnavailable -> e) - } - .handle { case UnsatisfiableRangeRejection(unsatisfiableRanges, actualEntityLength) => - val ranges = unsatisfiableRanges.mkString("'", "', '", "'") - val reason = - s"None of the following requested Ranges were satisfiable for actual entity length '$actualEntityLength': $ranges" - val e = Error("UnsatisfiableRange", reason) - rejectRequestEntityAndComplete(RangeNotSatisfiable -> e) - } - .handleAll[AuthenticationFailedRejection] { rejections => - val reason = rejections.headOption.map(_.cause) match { - case Some(CredentialsMissing) => - "The resource requires authentication, which was not supplied with the request." - case _ => "The supplied authentication is invalid." - } - val e = Error("AuthenticationFailed", reason) - val header = `WWW-Authenticate`(HttpChallenges.oAuth2("*")) - rejectRequestEntityAndComplete((Unauthorized, List(header), e)) - } - .handleAll[UnacceptedResponseContentTypeRejection] { rejections => - val supported = rejections.flatMap(_.supported).map(_.format).mkString("'", "', '", "'") - val reason = s"Resource representation is only available with these types: $supported." - val e = Error("UnacceptedResponseContentType", reason) - rejectRequestEntityAndComplete(NotAcceptable -> e) - } - .handleAll[UnacceptedResponseEncodingRejection] { rejections => - val supported = rejections.flatMap(_.supported).map(_.value).mkString("'", "', '", "'") - val reason = s"Resource representation is only available with these Content-Encodings: $supported." - val e = Error("UnacceptedResponseEncoding", reason) - rejectRequestEntityAndComplete(NotAcceptable -> e) - } - .handleAll[UnsupportedRequestContentTypeRejection] { rejections => - val supported = rejections.flatMap(_.supported).mkString("'", "' or '", "'") - val reason = s"The request's Content-Type is not supported. Expected: $supported." - val e = Error("UnsupportedRequestContentType", reason) - rejectRequestEntityAndComplete(UnsupportedMediaType -> e) - } - .handleAll[UnsupportedRequestEncodingRejection] { rejections => - val supported = rejections.map(_.supported.value).mkString("'", "' or '", "'") - val reason = s"The request's Content-Encoding is not supported. Expected: $supported." 
- val e = Error("UnsupportedRequestEncoding", reason) - rejectRequestEntityAndComplete(BadRequest -> e) - } - .handle { case ExpectedWebSocketRequestRejection => - val e = Error("ExpectedWebSocketRequest", "Expected WebSocket Upgrade request.") - rejectRequestEntityAndComplete(BadRequest -> e) - } - .handle { case ValidationRejection(msg, _) => - val e = Error("ValidationRejection", msg) - rejectRequestEntityAndComplete(BadRequest -> e) - } - .result() - } - - /** - * A rejection handler for all predefined akka rejections and additionally for rejections of type ''A'' (using the - * provided function ''f'' to complete the request). __Note__: the request entity bytes are automatically discarded. - * - * @see - * [[RejectionHandling.rejectRequestEntityAndComplete()]] - * @param f - * the function to use for handling rejections of type A - */ - def apply[A <: Rejection: ClassTag]( - f: A => ToResponseMarshallable - )(implicit J: ToEntityMarshaller[Json]): RejectionHandler = - handle(f) withFallback apply - - // $COVERAGE-ON$ -} diff --git a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/Routes.scala b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/Routes.scala deleted file mode 100644 index 6668d4291b..0000000000 --- a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/Routes.scala +++ /dev/null @@ -1,99 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage.routes - -import akka.http.scaladsl.model.headers.{`WWW-Authenticate`, HttpChallenges} -import akka.http.scaladsl.server.Directives._ -import akka.http.scaladsl.server.{ExceptionHandler, RejectionHandler, Route} -import cats.effect.unsafe.implicits._ -import ch.epfl.bluebrain.nexus.delta.kernel.Logger -import ch.epfl.bluebrain.nexus.storage.StorageError._ -import ch.epfl.bluebrain.nexus.storage.auth.AuthorizationMethod -import ch.epfl.bluebrain.nexus.storage.config.AppConfig -import ch.epfl.bluebrain.nexus.storage.config.AppConfig._ -import ch.epfl.bluebrain.nexus.storage.routes.AuthDirectives._ -import ch.epfl.bluebrain.nexus.storage.routes.PrefixDirectives._ -import ch.epfl.bluebrain.nexus.storage.routes.instances._ -import ch.epfl.bluebrain.nexus.storage.{AkkaSource, Rejection, StorageError, Storages} - -import scala.util.control.NonFatal - -object Routes { - - private[this] val logger = Logger[this.type] - - /** - * @return - * an ExceptionHandler that ensures a descriptive message is returned to the caller - */ - final def exceptionHandler: ExceptionHandler = { - def completeGeneric(): Route = - complete(InternalError("The system experienced an unexpected error, please try again later."): StorageError) - - ExceptionHandler { - case AuthenticationFailed => - // suppress errors for authentication failures - val status = StorageError.storageErrorStatusFrom(AuthenticationFailed) - val header = `WWW-Authenticate`(HttpChallenges.oAuth2("*")) - complete((status, List(header), AuthenticationFailed: StorageError)) - case AuthorizationFailed => - // suppress errors for authorization failures - complete(AuthorizationFailed: StorageError) - case err: PathNotFound => - complete(err: StorageError) - case err: PathInvalid => - complete(err: StorageError) - case err: StorageError => - onComplete(logger.error(err)("Exception caught during routes processing").unsafeToFuture()) { _ => - completeGeneric() - } - case NonFatal(err) => - onComplete(logger.error(err)("Exception caught during routes processing").unsafeToFuture()) { _ => - completeGeneric() - } - - } - } - - /** - * @return - * a complete RejectionHandler for 
all library and code rejections - */ - final val rejectionHandler: RejectionHandler = - RejectionHandling.apply { r: Rejection => - r - } withFallback RejectionHandling.notFound withFallback RejectionHandler.default - - /** - * Wraps the provided route with rejection and exception handling. - * - * @param route - * the route to wrap - */ - final def wrap(route: Route)(implicit hc: HttpConfig): Route = - handleExceptions(exceptionHandler) { - handleRejections(rejectionHandler) { - uriPrefix(hc.publicUri) { - route - } - } - } - - /** - * Generates the routes for all the platform resources - * - * @param storages - * the storages operations - */ - def apply( - storages: Storages[AkkaSource] - )(implicit config: AppConfig, authorizationMethod: AuthorizationMethod): Route = - //TODO: Fetch Bearer token and verify identity - wrap { - concat( - AppInfoRoutes(config.description).routes, - (pathPrefix(config.http.prefix) & validUser) { - StorageRoutes(storages).routes - } - ) - } - -} diff --git a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/StatusFrom.scala b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/StatusFrom.scala deleted file mode 100644 index cda7c31865..0000000000 --- a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/StatusFrom.scala +++ /dev/null @@ -1,37 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage.routes - -import akka.http.scaladsl.model.StatusCode - -/** - * Typeclass definition for ''A''s that can be mapped into a StatusCode. - * - * @tparam A - * generic type parameter - */ -trait StatusFrom[A] { - - /** - * Computes a [[akka.http.scaladsl.model.StatusCode]] instance from the argument value. - * - * @param value - * the input value - * @return - * the status code corresponding to the value - */ - def apply(value: A): StatusCode -} - -object StatusFrom { - - /** - * Lifts a function ''A => StatusCode'' into a ''StatusFrom[A]'' instance. 
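StatusFrom is the glue the marshallers in instances.scala use to turn a domain value into an HTTP status. A toy instance makes the contract concrete (the Rej ADT here is invented for the example):

    import akka.http.scaladsl.model.StatusCode
    import akka.http.scaladsl.model.StatusCodes.{Conflict, NotFound}

    trait StatusFrom[A] { def apply(value: A): StatusCode } // as deleted above

    object StatusFromExample {
      sealed trait Rej
      case object MissingRej extends Rej
      case object ClashRej   extends Rej

      // Maps each rejection onto the status code its response should carry.
      implicit val rejStatus: StatusFrom[Rej] = (r: Rej) =>
        r match {
          case MissingRej => NotFound
          case ClashRej   => Conflict
        }

      def main(args: Array[String]): Unit =
        println(implicitly[StatusFrom[Rej]].apply(ClashRej)) // 409 Conflict
    }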
- * - * @param f - * function from A to StatusCode - * @tparam A - * type parameter to map to StatusCode - * @return - * a ''StatusFrom'' instance from the argument function - */ - def apply[A](f: A => StatusCode): StatusFrom[A] = (value: A) => f(value) -} diff --git a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/StorageDirectives.scala b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/StorageDirectives.scala deleted file mode 100644 index 2014b28759..0000000000 --- a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/StorageDirectives.scala +++ /dev/null @@ -1,146 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage.routes - -import akka.http.scaladsl.model.Uri -import akka.http.scaladsl.model.Uri.Path -import akka.http.scaladsl.model.Uri.Path._ -import akka.http.scaladsl.server.Directives.{extractUnmatchedPath, failWith, pass, provide, reject} -import akka.http.scaladsl.server._ -import cats.data.NonEmptyList -import ch.epfl.bluebrain.nexus.storage.Rejection.{BucketNotFound, PathAlreadyExists, PathNotFound} -import ch.epfl.bluebrain.nexus.storage.StorageError.PathInvalid -import ch.epfl.bluebrain.nexus.storage.Storages -import ch.epfl.bluebrain.nexus.storage.Storages.PathExistence.{PathDoesNotExist, PathExists} -import ch.epfl.bluebrain.nexus.storage.Storages.BucketExistence.BucketExists - -import scala.annotation.tailrec - -object StorageDirectives { - - /** - * Extracts the path from the unmatched segments - * - * @param name - * the storage bucket name - */ - def extractPath(name: String): Directive1[Path] = - extractUnmatchedPath.flatMap(p => validatePath(name, p).tmap(_ => relativize(p))) - - private def pathInvalid(path: Path): Boolean = - path.toString.contains("//") || containsRelativeChar(path) - - /** - * Validates if the path is correct or malformed - * - * @param name - * the storage bucket name - * @param path - * the path to validate - */ - def validatePath(name: String, path: Path): Directive0 = - if (pathInvalid(path)) failWith(PathInvalid(name, path)) else pass - - def validatePaths(pathsByBucket: NonEmptyList[(String, Path)]): Directive0 = - pathsByBucket - .collectFirst[Directive0] { - case (bucket, p) if pathInvalid(p) => - failWith(PathInvalid(bucket, p)) - } - .getOrElse(pass) - - @tailrec - private def containsRelativeChar(path: Path): Boolean = - path match { - case Path.Empty => false - case Segment(head, _) if head == "." || head == ".." 
=> true - case _ => containsRelativeChar(path.tail) - } - - /** - * Returns the evidence that a storage bucket exists - * - * @param name - * the storage bucket name - * @param storages - * the storages bundle api - * @return - * BucketExists when the storage bucket exists, rejection otherwise - */ - def bucketExists(name: String)(implicit storages: Storages[_]): Directive1[BucketExists] = - storages.exists(name) match { - case exists: BucketExists => provide(exists) - case _ => reject(BucketNotFound(name)) - } - - def bucketsExist(buckets: NonEmptyList[String])(implicit storages: Storages[_]): Directive1[BucketExists] = - buckets - .map(storages.exists) - .zip(buckets) - .collectFirst[Directive1[BucketExists]] { case (e, bucket) if !e.exists => reject(BucketNotFound(bucket)) } - .getOrElse(provide(BucketExists)) - - /** - * Returns the evidence that a path exists - * - * @param name - * the storage bucket name - * @param path - * the path location - * @param storages - * the storages bundle api - * @return - * PathExists when the path exists inside the bucket, rejection otherwise - */ - def pathExists(name: String, path: Uri.Path)(implicit - storages: Storages[_] - ): Directive1[PathExists] = - storages.pathExists(name, path) match { - case exists: PathExists => provide(exists) - case _ => reject(PathNotFound(name, path)) - } - - /** - * Returns the evidence that a path does not exist - * - * @param name - * the storage bucket name - * @param path - * the path location - * @param storages - * the storages bundle api - * @return - * PathDoesNotExist when the path does not exist inside the bucket, rejection otherwise - */ - def pathNotExists(name: String, path: Uri.Path)(implicit - storages: Storages[_] - ): Directive1[PathDoesNotExist] = - storages.pathExists(name, path) match { - case notExists: PathDoesNotExist => provide(notExists) - case _ => reject(PathAlreadyExists(name, path)) - } - - def pathsDoNotExist(name: String, paths: NonEmptyList[Uri.Path])(implicit - storages: Storages[_] - ): Directive1[PathDoesNotExist] = - paths - .collectFirst[Directive1[PathDoesNotExist]] { - case p if storages.pathExists(name, p).exists => - reject(PathAlreadyExists(name, p)) - } - .getOrElse(provide(PathDoesNotExist)) - - /** - * Extracts the relative file path from the unmatched segments - */ - def extractRelativeFilePath(name: String): Directive1[Path] = - extractPath(name).flatMap { - case path if path.reverse.startsWithSegment => provide(path) - case path => failWith(PathInvalid(name, path)) - } - - @tailrec - private def relativize(path: Path): Path = - path match { - case Slash(rest) => relativize(rest) - case rest => rest - } -} diff --git a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/StorageRoutes.scala b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/StorageRoutes.scala deleted file mode 100644 index 7ca6accc78..0000000000 --- a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/StorageRoutes.scala +++ /dev/null @@ -1,150 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage.routes - -import akka.http.scaladsl.model.MediaTypes._ -import akka.http.scaladsl.model.StatusCodes._ -import akka.http.scaladsl.model.{HttpEntity, StatusCode, Uri} -import akka.http.scaladsl.server.Directives._ -import akka.http.scaladsl.server.Route -import cats.data.NonEmptyList -import cats.effect.unsafe.implicits._ -import cats.syntax.all._ -import ch.epfl.bluebrain.nexus.delta.kernel.Logger -import ch.epfl.bluebrain.nexus.storage.File.{Digest, FileAttributes} -import 
ch.epfl.bluebrain.nexus.storage.config.AppConfig -import ch.epfl.bluebrain.nexus.storage.config.AppConfig.HttpConfig -import ch.epfl.bluebrain.nexus.storage.routes.StorageDirectives._ -import ch.epfl.bluebrain.nexus.storage.routes.StorageRoutes.{CopyFilePayload, LinkFile} -import ch.epfl.bluebrain.nexus.storage.routes.StorageRoutes.LinkFile._ -import ch.epfl.bluebrain.nexus.storage.routes.instances._ -import ch.epfl.bluebrain.nexus.storage.{AkkaSource, Storages} -import io.circe.generic.semiauto._ -import io.circe.{Decoder, DecodingFailure, Encoder} -import kamon.instrumentation.akka.http.TracingDirectives.operationName - -class StorageRoutes()(implicit storages: Storages[AkkaSource], hc: HttpConfig) { - - private val logger = Logger[StorageRoutes] - - def routes: Route = - // Consume buckets/{name}/ - (encodeResponse & pathPrefix("buckets" / Segment)) { name => - concat( - // Check bucket - (head & pathEndOrSingleSlash) { - operationName(s"/${hc.prefix}/buckets/{}") { - bucketExists(name).apply { _ => - complete(OK) - } - } - }, - // Consume files - pathPrefix("files") { - bucketExists(name).apply { implicit bucketExistsEvidence => - concat( - extractPath(name) { path => - operationName(s"/${hc.prefix}/buckets/{}/files/{}") { - concat( - put { - pathNotExists(name, path).apply { implicit pathNotExistEvidence => - // Upload file - fileUpload("file") { case (_, source) => - complete(Created -> storages.createFile(name, path, source).unsafeToFuture()) - } - } - }, - put { - // Link file/dir - entity(as[LinkFile]) { case LinkFile(source) => - validatePath(name, source) { - complete(storages.moveFile(name, source, path).runWithStatus(OK)) - } - } - }, - // Get file - get { - pathExists(name, path).apply { implicit pathExistsEvidence => - storages.getFile(name, path) match { - case Right((source, Some(_))) => complete(HttpEntity(`application/octet-stream`, source)) - case Right((source, None)) => complete(HttpEntity(`application/x-tar`, source)) - case Left(err) => complete(err) - } - } - } - ) - } - }, - operationName(s"/${hc.prefix}/buckets/{}/files") { - post { - // Copy files within protected directory between potentially different buckets - entity(as[CopyFilePayload]) { payload => - val files = payload.files - bucketsExist(files.map(_.sourceBucket)).apply { implicit bucketExistsEvidence => - pathsDoNotExist(name, files.map(_.destination)).apply { implicit pathNotExistEvidence => - validatePaths(files.map(c => c.sourceBucket -> c.source)) { - complete( - (logger.info(s"Received request to copy files: $files") >> - storages.copyFiles(name, files)).runWithStatus(Created) - ) - } - } - } - } - } - } - ) - } - }, - // Consume attributes - (pathPrefix("attributes") & extractPath(name)) { path => - operationName(s"/${hc.prefix}/buckets/{}/attributes/{}") { - bucketExists(name).apply { implicit bucketExistsEvidence => - // Get file attributes - get { - pathExists(name, path).apply { implicit pathExistsEvidence => - val result = storages.getAttributes(name, path).map[(StatusCode, FileAttributes)] { - case attr @ FileAttributes(_, _, Digest.empty, _) => Accepted -> attr - case attr => OK -> attr - } - complete(result.unsafeToFuture()) - } - } - } - } - } - ) - } -} - -object StorageRoutes { - - /** - * Link file request. 
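The link branch of the routes above accepts a single-field JSON body. Decoded in isolation, with the Uri.Path field simplified to a String so the snippet needs only circe-generic and circe-parser:

    import io.circe.Decoder
    import io.circe.generic.semiauto.deriveDecoder
    import io.circe.parser.decode

    // `source` is a Uri.Path in the real route; String keeps the sketch dependency-free.
    final case class LinkFilePayload(source: String)

    object LinkFilePayload {
      implicit val dec: Decoder[LinkFilePayload] = deriveDecoder

      def main(args: Array[String]): Unit =
        println(decode[LinkFilePayload]("""{"source": "some/folder/file.txt"}"""))
        // Right(LinkFilePayload(some/folder/file.txt))
    }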
- * - * @param source - * the location of the file/dir - */ - final private[routes] case class LinkFile(source: Uri.Path) - - private[routes] object LinkFile { - import ch.epfl.bluebrain.nexus.storage.{decUriPath, encUriPath} - implicit val dec: Decoder[LinkFile] = deriveDecoder[LinkFile] - implicit val enc: Encoder[LinkFile] = deriveEncoder[LinkFile] - } - - final private[routes] case class CopyFilePayload(files: NonEmptyList[CopyFile]) - private[routes] object CopyFilePayload { - implicit val dec: Decoder[CopyFilePayload] = Decoder.instance { cur => - cur - .as[NonEmptyList[CopyFile]] - .bimap( - _ => DecodingFailure("No files provided for copy operation", Nil), - files => CopyFilePayload(files) - ) - } - } - - final def apply(storages: Storages[AkkaSource])(implicit cfg: AppConfig): StorageRoutes = { - implicit val s = storages - new StorageRoutes() - } -} diff --git a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/instances.scala b/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/instances.scala deleted file mode 100644 index 1ac2ee6313..0000000000 --- a/storage/src/main/scala/ch/epfl/bluebrain/nexus/storage/routes/instances.scala +++ /dev/null @@ -1,129 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage.routes - -import akka.http.scaladsl.marshalling.GenericMarshallers.eitherMarshaller -import akka.http.scaladsl.marshalling._ -import akka.http.scaladsl.model.MediaTypes._ -import akka.http.scaladsl.model._ -import cats.effect.IO -import cats.effect.unsafe.implicits._ -import ch.epfl.bluebrain.nexus.storage.JsonLdCirceSupport.sortKeys -import ch.epfl.bluebrain.nexus.storage.JsonLdCirceSupport.OrderedKeys -import ch.epfl.bluebrain.nexus.storage.Rejection -import ch.epfl.bluebrain.nexus.storage.config.AppConfig._ -import de.heikoseeberger.akkahttpcirce.FailFastCirceSupport -import io.circe._ -import io.circe.syntax._ -import ch.epfl.bluebrain.nexus.storage.MediaTypes.`application/ld+json` - -import scala.collection.immutable.Seq -import scala.concurrent.Future - -object instances extends LowPriority { - - /** - * `Either[Rejection,(StatusCode, A)]` => HTTP entity - * - * @tparam A - * type to encode - * @return - * marshaller for any `A` value - */ - implicit final def eitherValueMarshaller[A: Encoder](implicit - printer: Printer = defaultPrinter - ): ToResponseMarshaller[Either[Rejection, (StatusCode, A)]] = - eitherMarshaller(valueWithStatusCodeFromMarshaller[Rejection], valueWithStatusCodeMarshaller[A]) - - /** - * `A, StatusCodeFrom` => HTTP response - * - * @return - * marshaller for value - */ - implicit final def valueWithStatusCodeFromMarshaller[A: Encoder](implicit - statusFrom: StatusFrom[A], - printer: Printer = defaultPrinter, - ordered: OrderedKeys = orderedKeys - ): ToResponseMarshaller[A] = - jsonLdWithStatusCodeMarshaller.compose { value => - statusFrom(value) -> value.asJson - } - - implicit final class EitherFSyntax[A](f: IO[Either[Rejection, A]]) { - def runWithStatus(code: StatusCode): Future[Either[Rejection, (StatusCode, A)]] = - f.map(_.map(code -> _)).unsafeToFuture() - } - -} - -trait LowPriority extends FailFastCirceSupport { - - private[routes] val defaultPrinter = Printer.noSpaces.copy(dropNullValues = true) - - override def unmarshallerContentTypes: Seq[ContentTypeRange] = - List(`application/json`, `application/ld+json`) - - /** - * `StatusCode, Json` => HTTP response - * - * @return - * marshaller for JSON-LD value - */ - implicit final def jsonLdWithStatusCodeMarshaller(implicit - printer: Printer = defaultPrinter, - keys: 
OrderedKeys = orderedKeys - ): ToResponseMarshaller[(StatusCode, Json)] = - onOf(contentType => - Marshaller.withFixedContentType[(StatusCode, Json), HttpResponse](contentType) { case (status, json) => - HttpResponse(status = status, entity = HttpEntity(`application/ld+json`, printer.print(sortKeys(json)))) - } - ) - - /** - * `Json` => HTTP entity - * - * @return - * marshaller for JSON-LD value - */ - implicit final def jsonLdEntityMarshaller(implicit - printer: Printer = defaultPrinter, - keys: OrderedKeys = orderedKeys - ): ToEntityMarshaller[Json] = - onOf(contentType => - Marshaller.withFixedContentType[Json, MessageEntity](contentType) { json => - HttpEntity(`application/ld+json`, printer.print(sortKeys(json))) - } - ) - - /** - * `A` => HTTP entity - * - * @return - * marshaller for JSON-LD value - */ - implicit final def valueEntityMarshaller[A: Encoder](implicit - printer: Printer = defaultPrinter, - keys: OrderedKeys = orderedKeys - ): ToEntityMarshaller[A] = - jsonLdEntityMarshaller.compose(_.asJson) - - /** - * `StatusCode, A` => HTTP response - * - * @tparam A - * type to encode - * @return - * marshaller for any `A` value - */ - implicit final def valueWithStatusCodeMarshaller[A: Encoder](implicit - printer: Printer = defaultPrinter, - keys: OrderedKeys = orderedKeys - ): ToResponseMarshaller[(StatusCode, A)] = - jsonLdWithStatusCodeMarshaller.compose { case (status, value) => status -> value.asJson } - - private[routes] def onOf[A, Response]( - fMarshaller: MediaType.WithFixedCharset => Marshaller[A, Response] - ): Marshaller[A, Response] = { - val marshallers = Seq(`application/ld+json`, `application/json`).map(fMarshaller) - Marshaller.oneOf(marshallers: _*) - } -} diff --git a/storage/src/test/resources/app-info.json b/storage/src/test/resources/app-info.json deleted file mode 100644 index fee7734d05..0000000000 --- a/storage/src/test/resources/app-info.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "name" : "storage", - "version" : "{version}" -} diff --git a/storage/src/test/resources/app.conf b/storage/src/test/resources/app.conf deleted file mode 100644 index 3899e80d82..0000000000 --- a/storage/src/test/resources/app.conf +++ /dev/null @@ -1,7 +0,0 @@ -# All application specific configuration should reside here -app { - # Authorization method - authorization { - method = anonymous - } -} \ No newline at end of file diff --git a/storage/src/test/resources/content-type/file-example.json b/storage/src/test/resources/content-type/file-example.json deleted file mode 100644 index b12b0e3908..0000000000 --- a/storage/src/test/resources/content-type/file-example.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "content": "Example" -} \ No newline at end of file diff --git a/storage/src/test/resources/content-type/file.custom b/storage/src/test/resources/content-type/file.custom deleted file mode 100644 index 6b584e8ece..0000000000 --- a/storage/src/test/resources/content-type/file.custom +++ /dev/null @@ -1 +0,0 @@ -content \ No newline at end of file diff --git a/storage/src/test/resources/content-type/no-extension b/storage/src/test/resources/content-type/no-extension deleted file mode 100644 index 6b584e8ece..0000000000 --- a/storage/src/test/resources/content-type/no-extension +++ /dev/null @@ -1 +0,0 @@ -content \ No newline at end of file diff --git a/storage/src/test/resources/error.json b/storage/src/test/resources/error.json deleted file mode 100644 index e64e0595ad..0000000000 --- a/storage/src/test/resources/error.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "@context": 
"https://bluebrain.github.io/nexus/contexts/error.json", - "@type": "{type}", - "reason": "{reason}" -} \ No newline at end of file diff --git a/storage/src/test/resources/file-created.json b/storage/src/test/resources/file-created.json deleted file mode 100644 index 250688062e..0000000000 --- a/storage/src/test/resources/file-created.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "@context" : "https://bluebrain.github.io/nexus/contexts/resource.json", - "_location" : "{location}", - "_mediaType" : "{mediaType}", - "_bytes" : {bytes}, - "_digest" : { - "_algorithm" : "{algorithm}", - "_value" : "{value}" - } -} \ No newline at end of file diff --git a/storage/src/test/resources/file-link.json b/storage/src/test/resources/file-link.json deleted file mode 100644 index fdeedf47a8..0000000000 --- a/storage/src/test/resources/file-link.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "source": "{source}" -} \ No newline at end of file diff --git a/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/DiskStorageSpec.scala b/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/DiskStorageSpec.scala deleted file mode 100644 index a2ef4bc76a..0000000000 --- a/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/DiskStorageSpec.scala +++ /dev/null @@ -1,537 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage - -import akka.actor.ActorSystem -import akka.http.scaladsl.model.ContentTypes._ -import akka.http.scaladsl.model.MediaTypes.`application/x-tar` -import akka.http.scaladsl.model.Uri -import akka.stream.scaladsl.{Sink, Source} -import akka.testkit.TestKit -import akka.util.ByteString -import cats.data.NonEmptyList -import cats.effect.IO -import ch.epfl.bluebrain.nexus.delta.kernel.http.MediaTypeDetectorConfig -import ch.epfl.bluebrain.nexus.delta.kernel.utils.TransactionalFileCopier -import ch.epfl.bluebrain.nexus.storage.File.{Digest, FileAttributes} -import ch.epfl.bluebrain.nexus.storage.Rejection.{PathAlreadyExists, PathNotFound} -import ch.epfl.bluebrain.nexus.storage.StorageError.{PathInvalid, PermissionsFixingFailed} -import ch.epfl.bluebrain.nexus.storage.Storages.BucketExistence.{BucketDoesNotExist, BucketExists} -import ch.epfl.bluebrain.nexus.storage.Storages.DiskStorage -import ch.epfl.bluebrain.nexus.storage.Storages.PathExistence.{PathDoesNotExist, PathExists} -import ch.epfl.bluebrain.nexus.storage.attributes.{AttributesCache, ContentTypeDetector} -import ch.epfl.bluebrain.nexus.storage.config.AppConfig.{DigestConfig, StorageConfig} -import ch.epfl.bluebrain.nexus.storage.files.{CopyFileOutput, ValidateFile} -import ch.epfl.bluebrain.nexus.storage.routes.CopyFile -import ch.epfl.bluebrain.nexus.storage.utils.Randomness -import ch.epfl.bluebrain.nexus.testkit.scalatest.ce.CatsEffectSpec -import org.mockito.IdiomaticMockito -import org.scalatest.concurrent.ScalaFutures.convertScalaFuture -import org.scalatest.{BeforeAndAfterAll, Inspectors} - -import java.io.File -import java.nio.charset.StandardCharsets -import java.nio.charset.StandardCharsets.UTF_8 -import java.nio.file.{Files, Paths} -import scala.concurrent.ExecutionContext -import scala.concurrent.duration._ -import scala.reflect.io.Directory - -class DiskStorageSpec - extends TestKit(ActorSystem("DiskStorageSpec")) - with CatsEffectSpec - with Randomness - with BeforeAndAfterAll - with Inspectors - with IdiomaticMockito { - - implicit val ec: ExecutionContext = system.dispatcher - - val rootPath = Files.createTempDirectory("storage-test") - val scratchPath = Files.createTempDirectory("scratch") - val sConfig = - StorageConfig(rootPath, 
List(scratchPath), Paths.get("nexus"), fixerEnabled = true, Vector("/bin/echo"), None) - val dConfig = DigestConfig("SHA-256", 1L, 1, 1, 1.second) - val contentTypeDetector = new ContentTypeDetector(MediaTypeDetectorConfig.Empty) - val cache = mock[AttributesCache] - val validateFile = ValidateFile.mk(sConfig) - val copyFiles = TransactionalFileCopier.mk() - val storage = mkDiskStorage(sConfig) - - private def mkDiskStorage(cfg: StorageConfig) = - new DiskStorage(cfg, contentTypeDetector, dConfig, cache, validateFile, copyFiles) - - override def afterAll(): Unit = { - Directory(rootPath.toFile).deleteRecursively() - () - } - - trait AbsoluteDirectoryCreated { - val name = randomString() - val baseRootPath = rootPath.resolve(name) - val basePath = baseRootPath.resolve(sConfig.protectedDirectory) - Files.createDirectories(rootPath.resolve(name).resolve(sConfig.protectedDirectory)) - Files.createDirectories(scratchPath) - } - - trait RelativeDirectoryCreated extends AbsoluteDirectoryCreated { - val relativeDir = s"some/${randomString()}" - val absoluteDir = basePath.resolve(relativeDir) - val relativeFileString = s"$relativeDir/file.txt" - val relativeFilePath = Uri.Path(relativeFileString) - val absoluteFilePath = basePath.resolve(Paths.get(relativeFilePath.toString())) - Files.createDirectories(absoluteFilePath.getParent) - implicit val bucketExistsEvidence: BucketExists = BucketExists - val alg = "SHA-256" - } - - "A disk storage bundle" when { - - "checking storage" should { - - "fail when bucket directory does not exists" in { - val name = randomString() - storage.exists(name) shouldBe a[BucketDoesNotExist] - } - - "fail when bucket is not a directory" in { - val name = randomString() - val directory = Files.createDirectories(rootPath.resolve(name)) - Files.createFile(directory.resolve(sConfig.protectedDirectory)) - storage.exists(name) shouldBe a[BucketDoesNotExist] - } - - "fail when the bucket goes out of the scope" in new AbsoluteDirectoryCreated { - val invalid = List("one/two", "../other", "..", "one/two/three") - forAll(invalid) { - storage.exists(_) shouldBe a[BucketDoesNotExist] - } - } - - "pass" in new AbsoluteDirectoryCreated { - storage.exists(name) shouldBe a[BucketExists] - } - } - - "checking path existence" should { - - "exists" in new AbsoluteDirectoryCreated { - val relativeFileString = "some/file.txt" - val relativeFilePath = Uri.Path(relativeFileString) - Files.createDirectories(basePath.resolve("some")) - val filePath = basePath.resolve(relativeFileString) - Files.createFile(filePath) - storage.pathExists(name, relativeFilePath) shouldBe a[PathExists] - } - - "exists outside scope" in new AbsoluteDirectoryCreated { - val relativeFileString = "../some/file.txt" - val relativeFilePath = Uri.Path(relativeFileString) - Files.createDirectories(basePath.resolve("..").resolve("some").normalize()) - val filePath = basePath.resolve(relativeFileString).normalize() - Files.createFile(filePath) - storage.pathExists(name, relativeFilePath) shouldBe a[PathDoesNotExist] - } - - "not exists" in new RelativeDirectoryCreated { - storage.pathExists(name, relativeFilePath) shouldBe a[PathDoesNotExist] - } - } - - "creating a file" should { - - "fail when destination is out of bucket scope" in new RelativeDirectoryCreated { - val content = "some content" - val source: AkkaSource = Source.single(ByteString(content)) - implicit val pathDoesNotExist: PathDoesNotExist = PathDoesNotExist - val relativePath = Uri.Path("some/../../path") - storage.createFile(name, relativePath, 
source).rejectedWith[StorageError] shouldEqual - PathInvalid(name, relativePath) - } - - "pass with relative path" in new RelativeDirectoryCreated { - val content = "some content" - val source: AkkaSource = Source.single(ByteString(content)) - val digest = Digest("SHA-256", "290f493c44f5d63d06b374d0a5abd292fae38b92cab2fae5efefe1b0e9347f56") - implicit val pathDoesNotExist: PathDoesNotExist = PathDoesNotExist - storage.createFile(name, relativeFilePath, source).accepted shouldEqual - FileAttributes(s"file://${absoluteFilePath.toString}", 12L, digest, `text/plain(UTF-8)`) - } - - "pass with absolute path" in new RelativeDirectoryCreated { - val content = "some content" - val source: AkkaSource = Source.single(ByteString(content)) - val digest = Digest("SHA-256", "290f493c44f5d63d06b374d0a5abd292fae38b92cab2fae5efefe1b0e9347f56") - implicit val pathDoesNotExist: PathDoesNotExist = PathDoesNotExist - storage.createFile(name, Uri.Path(absoluteFilePath.toString), source).accepted shouldEqual - FileAttributes(s"file://${absoluteFilePath.toString}", 12L, digest, `text/plain(UTF-8)`) - } - } - - "linking" should { - implicit val bucketExistsEvidence = BucketExists - - "fail when call to nexus-fixer fails" in new AbsoluteDirectoryCreated { - val falseBinary = if (new File("/bin/false").exists()) "/bin/false" else "/usr/bin/false" - val badStorage = mkDiskStorage(sConfig.copy(fixerCommand = Vector(falseBinary))) - val file = "some/folder/my !file.txt" - val absoluteFile = baseRootPath.resolve(Paths.get(file)) - Files.createDirectories(absoluteFile.getParent) - Files.write(absoluteFile, "something".getBytes(StandardCharsets.UTF_8)) - - badStorage - .moveFile(name, Uri.Path(file), Uri.Path(randomString())) - .rejectedWith[StorageError] shouldEqual PermissionsFixingFailed(absoluteFile.toString, "") - } - - "fail when source does not exists" in new AbsoluteDirectoryCreated { - val source = randomString() - storage.moveFile(name, Uri.Path(source), Uri.Path(randomString())).accepted.leftValue shouldEqual - PathNotFound(name, Uri.Path(source)) - } - - "fail when source is inside protected directory" in new AbsoluteDirectoryCreated { - val file = sConfig.protectedDirectory.toString + "/other.txt" - val absoluteFile = baseRootPath.resolve(Paths.get(file)) - Files.createDirectories(absoluteFile.getParent) - Files.write(absoluteFile, "something".getBytes(StandardCharsets.UTF_8)) - - storage.moveFile(name, Uri.Path(file), Uri.Path(randomString())).accepted.leftValue shouldEqual - PathNotFound(name, Uri.Path(file)) - } - - "fail when destination already exists" in new AbsoluteDirectoryCreated { - val file = "some/folder/my !file.txt" - val absoluteFile = baseRootPath.resolve(Paths.get(file.toString)) - Files.createDirectories(absoluteFile.getParent) - Files.write(absoluteFile, "something".getBytes(StandardCharsets.UTF_8)) - - val fileDest = basePath.resolve(Paths.get("my !file.txt")) - Files.write(fileDest, "something".getBytes(StandardCharsets.UTF_8)) - - storage - .moveFile(name, Uri.Path(file), Uri.Path("my !file.txt")) - .accepted - .leftValue shouldEqual - PathAlreadyExists(name, Uri.Path("my !file.txt")) - } - - "fail when destination is out of bucket scope" in new AbsoluteDirectoryCreated { - val file = "some/folder/my !file.txt" - val dest = Uri.Path("../some/other path.txt") - val absoluteFile = baseRootPath.resolve(Paths.get(file.toString)) - Files.createDirectories(absoluteFile.getParent) - - val content = "some content" - Files.write(absoluteFile, content.getBytes(StandardCharsets.UTF_8)) - - 
storage.moveFile(name, Uri.Path(file), dest).rejectedWith[StorageError] shouldEqual - PathInvalid(name, dest) - Files.exists(absoluteFile) shouldEqual true - } - - "fail on absolute source path not starting with allowed prefix" in new AbsoluteDirectoryCreated { - val file = "some/folder/my !file.txt" - val absoluteFile = rootPath.resolve(Paths.get(file)) - Files.createDirectories(absoluteFile.getParent) - - val content = "some content" - Files.write(absoluteFile, content.getBytes(StandardCharsets.UTF_8)) - - storage - .moveFile(name, Uri.Path(absoluteFile.toString), Uri.Path("some/other path.txt")) - .rejectedWith[StorageError] shouldEqual - PathInvalid(name, Uri.Path(absoluteFile.toString)) - Files.exists(absoluteFile) shouldEqual true - } - - "fail on directory path not starting with allowed prefix" in new AbsoluteDirectoryCreated { - val dir = "some/folder" - val absoluteDir = rootPath.resolve(Paths.get(dir.toString)) - Files.createDirectories(absoluteDir) - - val absoluteFile = absoluteDir.resolve(Paths.get("my !file.txt")) - val content = "some content" - Files.write(absoluteFile, content.getBytes(StandardCharsets.UTF_8)) - - val result = storage - .moveFile(name, Uri.Path(absoluteDir.toString), Uri.Path("some/other")) - .rejectedWith[StorageError] - result shouldEqual PathInvalid(name, Uri.Path(absoluteDir.toString)) - } - - val linkingOptions: List[Option[Boolean]] = List(Option(true), Option(false), None) - - "pass on file specified using a relative path" in { - forAll(linkingOptions) { linkWithAtomicMove => - val diskStorage = mkDiskStorage(sConfig.copy(linkWithAtomicMove = linkWithAtomicMove)) - new AbsoluteDirectoryCreated { - val file = "some/folder/my !file.txt" - val absoluteFile = baseRootPath.resolve(Paths.get(file.toString)) - Files.createDirectories(absoluteFile.getParent) - - val content = "some content" - Files.write(absoluteFile, content.getBytes(StandardCharsets.UTF_8)) - - diskStorage.moveFile(name, Uri.Path(file), Uri.Path("some/other path.txt")).accepted.rightValue shouldEqual - FileAttributes( - s"file://${basePath.resolve("some/other%20path.txt")}", - 12L, - Digest.empty, - `text/plain(UTF-8)` - ) - Files.exists(absoluteFile) shouldEqual false - Files.exists(basePath.resolve("some/other path.txt")) shouldEqual true - } - } - } - - "pass on file specified using an absolute path" in { - forAll(linkingOptions) { linkWithAtomicMove => - val diskStorage = mkDiskStorage(sConfig.copy(linkWithAtomicMove = linkWithAtomicMove)) - new AbsoluteDirectoryCreated { - val file = "some/folder/my !file.txt" - val absoluteFile = scratchPath.resolve(Paths.get(file)) - Files.createDirectories(absoluteFile.getParent) - - val content = "some content" - Files.write(absoluteFile, content.getBytes(StandardCharsets.UTF_8)) - - diskStorage - .moveFile(name, Uri.Path(absoluteFile.toString), Uri.Path("some/other path.txt")) - .accepted - .rightValue shouldEqual - FileAttributes( - s"file://${basePath.resolve("some/other%20path.txt")}", - 12L, - Digest.empty, - `text/plain(UTF-8)` - ) - - Files.exists(absoluteFile) shouldEqual false - Files.exists(basePath.resolve("some/other path.txt")) shouldEqual true - } - } - } - - "pass on directory specified with a relative path" in { - forAll(linkingOptions) { linkWithAtomicMove => - val diskStorage = mkDiskStorage(sConfig.copy(linkWithAtomicMove = linkWithAtomicMove)) - new AbsoluteDirectoryCreated { - val dir = "some/folder" - val absoluteDir = baseRootPath.resolve(Paths.get(dir.toString)) - Files.createDirectories(absoluteDir) - - val absoluteFile = 
absoluteDir.resolve(Paths.get("my !file.txt")) - val content = "some content" - Files.write(absoluteFile, content.getBytes(StandardCharsets.UTF_8)) - - val result = diskStorage.moveFile(name, Uri.Path(dir), Uri.Path("some/other")).accepted.rightValue - val resolvedDir = basePath.resolve("some/other") - result shouldEqual FileAttributes(s"file://$resolvedDir", 12L, Digest.empty, `application/x-tar`) - Files.exists(absoluteDir) shouldEqual false - Files.exists(absoluteFile) shouldEqual false - Files.exists(resolvedDir) shouldEqual true - Files.exists(basePath.resolve("some/other/my !file.txt")) shouldEqual true - } - } - } - - "pass on directory specified with an absolute path" in { - forAll(linkingOptions) { linkWithAtomicMove => - val diskStorage = mkDiskStorage(sConfig.copy(linkWithAtomicMove = linkWithAtomicMove)) - new AbsoluteDirectoryCreated { - val dir = "some/folder" - val absoluteDir = scratchPath.resolve(Paths.get(dir.toString)) - Files.createDirectories(absoluteDir) - - val absoluteFile = absoluteDir.resolve(Paths.get("my !file.txt")) - val content = "some content" - Files.write(absoluteFile, content.getBytes(StandardCharsets.UTF_8)) - - val result = - diskStorage.moveFile(name, Uri.Path(absoluteDir.toString), Uri.Path("some/other")).accepted.rightValue - val resolvedDir = basePath.resolve("some/other") - result shouldEqual FileAttributes(s"file://$resolvedDir", 12L, Digest.empty, `application/x-tar`) - Files.exists(absoluteDir) shouldEqual false - Files.exists(absoluteFile) shouldEqual false - Files.exists(resolvedDir) shouldEqual true - Files.exists(basePath.resolve("some/other/my !file.txt")) shouldEqual true - } - } - } - } - - "copying" should { - implicit val bucketExistsEvidence = BucketExists - implicit val pathDoesNotExist: PathDoesNotExist = PathDoesNotExist - - "fail when source does not exists" in new AbsoluteDirectoryCreated { - val source = randomString() - val files = NonEmptyList.of(CopyFile(name, Uri.Path(source), Uri.Path(randomString()))) - storage.copyFiles(name, files).accepted.leftValue shouldEqual PathNotFound(name, Uri.Path(source)) - } - - "fail when source is not inside protected directory" in new AbsoluteDirectoryCreated { - val file = "some/folder/my !file.txt" - val absoluteFile = baseRootPath.resolve(Paths.get(file)) - Files.createDirectories(absoluteFile.getParent) - Files.write(absoluteFile, "something".getBytes(StandardCharsets.UTF_8)) - val files = NonEmptyList.of(CopyFile(name, Uri.Path(file), Uri.Path(randomString()))) - - storage.copyFiles(name, files).accepted.leftValue shouldEqual PathNotFound(name, Uri.Path(file)) - } - - "fail when destination already exists" in new AbsoluteDirectoryCreated { - val file = sConfig.protectedDirectory.toString + "/my !file.txt" - val absoluteFile = basePath.resolve(Paths.get(file)) - Files.createDirectories(absoluteFile.getParent) - Files.write(absoluteFile, "something".getBytes(StandardCharsets.UTF_8)) - - val destFile = "destFile.txt" - val resolvedDestFile = basePath.resolve(Paths.get(destFile)) - Files.write(resolvedDestFile, "somethingelse".getBytes(StandardCharsets.UTF_8)) - val files = NonEmptyList.of(CopyFile(name, Uri.Path(file), Uri.Path(destFile))) - - storage.copyFiles(name, files).accepted.leftValue shouldEqual PathAlreadyExists(name, Uri.Path(destFile)) - } - - "fail when destination is out of bucket scope" in new AbsoluteDirectoryCreated { - val file = sConfig.protectedDirectory.toString + "/my !file.txt" - val dest = Uri.Path("../some/other path.txt") - val absoluteFile = 
basePath.resolve(Paths.get(file.toString)) - Files.createDirectories(absoluteFile.getParent) - - val content = "some content" - Files.write(absoluteFile, content.getBytes(StandardCharsets.UTF_8)) - val files = NonEmptyList.of(CopyFile(name, Uri.Path(file), dest)) - - storage.copyFiles(name, files).rejectedWith[StorageError] shouldEqual PathInvalid(name, dest) - - Files.exists(absoluteFile) shouldEqual true - } - - "pass on file in same bucket specified for absolute/relative path" in { - forAll(List(true, false)) { useRelativePath => - new AbsoluteDirectoryCreated { - val sourceFile = sConfig.protectedDirectory.toString + "/my !file.txt" - val absoluteSourceFile = basePath.resolve(Paths.get(sourceFile)) - Files.createDirectories(absoluteSourceFile.getParent) - - val content = "some content" - Files.write(absoluteSourceFile, content.getBytes(StandardCharsets.UTF_8)) - - val destPath = "some/other path.txt" - val absoluteDestFile = basePath.resolve(Paths.get(destPath)) - - val sourcePathToUse = if (useRelativePath) sourceFile else absoluteSourceFile.toString - val files = NonEmptyList.of(CopyFile(name, Uri.Path(sourcePathToUse), Uri.Path(destPath))) - val expectedOutput = - CopyFileOutput(Uri.Path(sourcePathToUse), Uri.Path(destPath), absoluteSourceFile, absoluteDestFile) - - storage.copyFiles(name, files).accepted.rightValue shouldEqual NonEmptyList.of(expectedOutput) - - Files.exists(absoluteSourceFile) shouldEqual true - Files.exists(absoluteDestFile) shouldEqual true - Files.readString(absoluteDestFile, StandardCharsets.UTF_8) shouldEqual content - } - } - } - - "pass on file in different bucket specified for absolute/relative path" in { - forAll(List(true, false)) { useRelativePath => - val dest: AbsoluteDirectoryCreated = new AbsoluteDirectoryCreated {} - val source: AbsoluteDirectoryCreated = new AbsoluteDirectoryCreated {} - val sourceFile = sConfig.protectedDirectory.toString + "/my !file.txt" - val absoluteSourceFile = source.basePath.resolve(Paths.get(sourceFile)) - Files.createDirectories(absoluteSourceFile.getParent) - - val content = "some content" - Files.write(absoluteSourceFile, content.getBytes(StandardCharsets.UTF_8)) - - val destPath = "some/other path.txt" - val absoluteDestFile = dest.basePath.resolve(Paths.get(destPath)) - - val sourcePathToUse = if (useRelativePath) sourceFile else absoluteSourceFile.toString - val files = NonEmptyList.of(CopyFile(source.name, Uri.Path(sourcePathToUse), Uri.Path(destPath))) - val expectedOutput = - CopyFileOutput(Uri.Path(sourcePathToUse), Uri.Path(destPath), absoluteSourceFile, absoluteDestFile) - - Files.exists(absoluteSourceFile) shouldEqual true - - storage.copyFiles(dest.name, files).accepted shouldEqual Right(NonEmptyList.of(expectedOutput)) - - Files.exists(absoluteSourceFile) shouldEqual true - Files.exists(absoluteDestFile) shouldEqual true - Files.readString(absoluteDestFile, StandardCharsets.UTF_8) shouldEqual content - } - } - - } - - "fetching" should { - - implicit val pathExistsEvidence = PathExists - - "fail when it does not exists" in new RelativeDirectoryCreated { - storage.getFile(name, relativeFilePath).leftValue shouldEqual - PathNotFound(name, relativeFilePath) - } - - "pass with file with relative path" in new RelativeDirectoryCreated { - val content = "some content" - Files.write(absoluteFilePath, content.getBytes(StandardCharsets.UTF_8)) - val (resultSource, resultFilename) = storage.getFile(name, relativeFilePath).rightValue - resultFilename.value shouldEqual "file.txt" - 
resultSource.runWith(Sink.head).futureValue.decodeString(UTF_8) shouldEqual content - } - - "pass with file with absolute path" in new RelativeDirectoryCreated { - val content = "some content" - Files.write(absoluteFilePath, content.getBytes(StandardCharsets.UTF_8)) - val (resultSource, resultFilename) = storage.getFile(name, Uri.Path(absoluteFilePath.toString)).rightValue - resultFilename.value shouldEqual "file.txt" - resultSource.runWith(Sink.head).futureValue.decodeString(UTF_8) shouldEqual content - } - - "pass with directory with relative path" in new RelativeDirectoryCreated { - val content = "some content" - Files.write(absoluteFilePath, content.getBytes(StandardCharsets.UTF_8)) - val (resultSource, resultFilename) = storage.getFile(name, Uri.Path(relativeDir)).rightValue - resultFilename shouldEqual None - resultSource.runFold("")(_ ++ _.utf8String).futureValue should include(content) - } - - "pass with directory with absolute path" in new RelativeDirectoryCreated { - val content = "some content" - Files.write(absoluteFilePath, content.getBytes(StandardCharsets.UTF_8)) - val (resultSource, resultFilename) = storage.getFile(name, Uri.Path(absoluteDir.toString)).rightValue - resultFilename shouldEqual None - resultSource.runFold("")(_ ++ _.utf8String).futureValue should include(content) - } - } - - "fetching attributes" should { - - implicit val pathExistsEvidence = PathExists - - "fail when it does not exists" in new RelativeDirectoryCreated { - storage.getFile(name, relativeFilePath).leftValue shouldEqual - PathNotFound(name, relativeFilePath) - } - - "return the attributes specifying relative path" in new RelativeDirectoryCreated { - val content = "some content" - Files.write(absoluteFilePath, content.getBytes(StandardCharsets.UTF_8)) - val expectedAttributes = FileAttributes( - s"file://$absoluteFilePath", - content.size.toLong, - Digest(alg, randomString()), - `text/plain(UTF-8)` - ) - cache.get(absoluteFilePath) shouldReturn IO(expectedAttributes) - storage.getAttributes(name, relativeFilePath).accepted shouldEqual expectedAttributes - storage.getAttributes(name, Uri.Path(absoluteFilePath.toString)).accepted shouldEqual expectedAttributes - } - } - } - -} diff --git a/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/StringProcessLoggerSpec.scala b/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/StringProcessLoggerSpec.scala deleted file mode 100644 index e16dec2253..0000000000 --- a/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/StringProcessLoggerSpec.scala +++ /dev/null @@ -1,27 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage - -import ch.epfl.bluebrain.nexus.testkit.scalatest.BaseSpec - -import scala.sys.process._ - -class StringProcessLoggerSpec extends BaseSpec { - "A StringProcessLogger" should { - "log stdout" in { - val cmd = List("echo", "-n", "Hello", "world!") - val process = Process(cmd) - val logger = StringProcessLogger(cmd) - val exitCode = process ! logger - exitCode shouldEqual 0 - logger.toString shouldEqual "Hello world!" - } - - "log stderr" in { - val cmd = List("cat", "/") - val process = Process(cmd) - val logger = StringProcessLogger(cmd) - val exitCode = process ! 
logger - exitCode should not be 0 - logger.toString shouldEqual "cat: /: Is a directory" - } - } -} diff --git a/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/TarFlowSpec.scala b/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/TarFlowSpec.scala deleted file mode 100644 index b32f62cf1f..0000000000 --- a/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/TarFlowSpec.scala +++ /dev/null @@ -1,82 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage - -import akka.actor.ActorSystem -import akka.stream.alpakka.file.scaladsl.Directory -import akka.stream.scaladsl.{FileIO, Source} -import akka.testkit.TestKit -import akka.util.ByteString -import ch.epfl.bluebrain.nexus.storage.utils.Randomness -import org.apache.commons.compress.archivers.tar.TarArchiveInputStream -import org.scalatest.concurrent.ScalaFutures.convertScalaFuture -import org.scalatest.matchers.should.Matchers -import org.scalatest.wordspec.AnyWordSpecLike -import org.scalatest.{BeforeAndAfterAll, Inspectors, OptionValues} - -import java.io.ByteArrayInputStream -import java.nio.file.{Files, Path, Paths} -import scala.annotation.tailrec -import scala.reflect.io.{Directory => ScalaDirectory} - -class TarFlowSpec - extends TestKit(ActorSystem("TarFlowSpec")) - with AnyWordSpecLike - with Matchers - with Randomness - with OptionValues - with Inspectors - with BeforeAndAfterAll { - - val basePath = Files.createTempDirectory("tarflow") - val dir1 = basePath.resolve("one") - val dir2 = basePath.resolve("two") - - override def afterAll(): Unit = { - super.afterAll() - ScalaDirectory(basePath.toFile).deleteRecursively() - () - } - - type PathAndContent = (Path, String) - - "A TarFlow" should { - - Files.createDirectories(dir1) - Files.createDirectories(dir2) - - def relativize(path: Path): String = basePath.getParent().relativize(path).toString - - "generate the byteString for a tar file correctly" in { - val file1 = dir1.resolve("file1.txt") - val file1Content = randomString() - val file2 = dir1.resolve("file3.txt") - val file2Content = randomString() - val file3 = dir2.resolve("file3.txt") - val file3Content = randomString() - val files = List(file1 -> file1Content, file2 -> file2Content, file3 -> file3Content) - forAll(files) { case (file, content) => - Source.single(ByteString(content)).runWith(FileIO.toPath(file)).futureValue - } - val byteString = Directory.walk(basePath).via(TarFlow.writer(basePath)).runReduce(_ ++ _).futureValue - val bytes = new ByteArrayInputStream(byteString.toArray) - val tar = new TarArchiveInputStream(bytes) - - @tailrec def readEntries( - tar: TarArchiveInputStream, - entries: List[PathAndContent] = Nil - ): List[PathAndContent] = { - val entry = tar.getNextEntry - if (entry == null) entries - else { - val data = Array.ofDim[Byte](entry.getSize.toInt) - tar.read(data) - readEntries(tar, (Paths.get(entry.getName) -> ByteString(data).utf8String) :: entries) - } - } - val directories = List(relativize(basePath) -> "", relativize(dir1) -> "", relativize(dir2) -> "") - val untarred = readEntries(tar).map { case (path, content) => path.toString -> content } - val expected = files.map { case (path, content) => relativize(path) -> content } ++ directories - untarred should contain theSameElementsAs expected - } - } - -} diff --git a/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/attributes/AttributesCacheSpec.scala b/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/attributes/AttributesCacheSpec.scala deleted file mode 100644 index 0b89b5b59e..0000000000 --- 
a/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/attributes/AttributesCacheSpec.scala +++ /dev/null @@ -1,161 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage.attributes - -import akka.actor.ActorSystem -import akka.http.scaladsl.model.MediaTypes.{`application/octet-stream`, `image/jpeg`} -import akka.testkit.TestKit -import akka.util.Timeout -import cats.effect.{IO, Ref} -import cats.syntax.all._ -import ch.epfl.bluebrain.nexus.storage.File.{Digest, FileAttributes} -import ch.epfl.bluebrain.nexus.storage._ -import ch.epfl.bluebrain.nexus.storage.config.AppConfig.DigestConfig -import ch.epfl.bluebrain.nexus.storage.utils.Randomness -import ch.epfl.bluebrain.nexus.testkit.scalatest.ce.CatsEffectSpec -import org.scalactic.source.Position -import org.scalatest.concurrent.Eventually -import org.scalatest.{BeforeAndAfter, Inspectors} - -import java.nio.file.{Path, Paths} -import java.time.{Clock, Instant, ZoneId} -import scala.concurrent.ExecutionContext -import scala.concurrent.duration._ - -class AttributesCacheSpec - extends TestKit(ActorSystem("AttributesCacheSpec")) - with CatsEffectSpec - with BeforeAndAfter - with Inspectors - with Randomness - with Eventually { - - implicit override def patienceConfig: PatienceConfig = PatienceConfig(20.second, 100.milliseconds) - - implicit val config: DigestConfig = - DigestConfig("SHA-256", maxInMemory = 10, concurrentComputations = 3, 20, 5.seconds) - - implicit val timeout: Timeout = Timeout(1.minute) - implicit val executionContext: ExecutionContext = ExecutionContext.global - - trait Ctx { - val path: Path = Paths.get(randomString()) - val digest = Digest(config.algorithm, randomString()) - val attributes = FileAttributes(s"file://$path", genInt().toLong, digest, `image/jpeg`) - def attributesEmpty(p: Path = path) = FileAttributes(p.toAkkaUri, 0L, Digest.empty, `application/octet-stream`) - val computedPaths = Ref.unsafe[IO, Map[Path, Int]](Map.empty[Path, Int]) - - def computedPathsSize: IO[Int] = computedPaths.get.map(_.size) - - def wasCalledOnce(path: Path)(implicit pos: Position) = computedPaths.get - .map { - _.get(path) - } - .accepted - .value shouldEqual 1 - - implicit val clock: Clock = new Clock { - override def getZone: ZoneId = ZoneId.systemDefault() - override def withZone(zoneId: ZoneId): Clock = Clock.systemUTC() - // One second passes for every attribute computation performed - override def instant(): Instant = Instant.EPOCH - } - - val defaultComputation: AttributesComputation[String] = (path: Path, _: String) => - computedPaths.update(_.updatedWith(path)(_.fold(1)(_ + 1).some)).as(attributes) - - def computedAttributes(path: Path, algorithm: String): FileAttributes = { - val digest = Digest(algorithm, "COMPUTED") - FileAttributes(path.toAkkaUri, 42L, digest, `image/jpeg`) - } - - } - - "An AttributesCache" should { - - "trigger a computation and fetch file after" in new Ctx { - implicit val computation: AttributesComputation[String] = defaultComputation - val attributesCache = AttributesCache[String] - attributesCache.asyncComputePut(path, config.algorithm) - eventually { computedPathsSize.accepted shouldEqual 1 } - wasCalledOnce(path) - attributesCache.get(path).accepted shouldEqual attributes - wasCalledOnce(path) - } - - "get file that triggers attributes computation" in new Ctx { - implicit val computation: AttributesComputation[String] = defaultComputation - val attributesCache = AttributesCache[String] - attributesCache.get(path).accepted shouldEqual attributesEmpty() - eventually(computedPathsSize.accepted
shouldEqual 1) - wasCalledOnce(path) - attributesCache.get(path).accepted shouldEqual attributes - wasCalledOnce(path) - } - - "verify concurrent computations" in new Ctx { - val list = List.tabulate(10) { i => Paths.get(i.toString) } - val time = System.currentTimeMillis() - - implicit val delayedComputation: AttributesComputation[String] = (path: Path, algorithm: String) => - IO.sleep(1000.millis) >> defaultComputation(path, algorithm) >> IO.pure(computedAttributes(path, algorithm)) - val attributesCache = AttributesCache[String] - - forAll(list) { path => - attributesCache.get(path).accepted shouldEqual attributesEmpty(path) - } - - eventually(computedPathsSize.accepted shouldEqual 10) - - forAll(list) { path => wasCalledOnce(path) } - - val diff = System.currentTimeMillis() - time - diff should be > 4000L - diff should be < 6500L - - forAll(list) { path => - attributesCache.get(path).accepted shouldEqual computedAttributes(path, config.algorithm) - } - } - - "verify remove oldest" in new Ctx { - val list = List.tabulate(20) { i => Paths.get(i.toString) } - - implicit val computation: AttributesComputation[String] = (path: Path, algorithm: String) => - defaultComputation(path, algorithm) >> IO.pure(computedAttributes(path, algorithm)) - val attributesCache = AttributesCache[String] - - forAll(list) { path => - attributesCache.get(path).accepted shouldEqual attributesEmpty(path) - } - - eventually(computedPathsSize.accepted shouldEqual 20) - - forAll(list.takeRight(10)) { path => - attributesCache.get(path).accepted shouldEqual computedAttributes(path, config.algorithm) - } - - forAll(list.take(10)) { path => - attributesCache.get(path).accepted shouldEqual attributesEmpty(path) - } - } - - "verify failure is skipped" in new Ctx { - val list = List.tabulate(5) { i => Paths.get(i.toString) } - - implicit val computation: AttributesComputation[String] = (path: Path, algorithm: String) => { - if (path.endsWith("0")) - IO.raiseError(new RuntimeException) - else - defaultComputation(path, algorithm) >> IO.pure(computedAttributes(path, algorithm)) - } - val attributesCache = AttributesCache[String] - - forAll(list) { path => attributesCache.get(path).accepted shouldEqual attributesEmpty(path) } - - forAll(list.drop(1)) { path => - eventually( - attributesCache.get(path).accepted shouldEqual computedAttributes(path, config.algorithm) - ) - } - } - } -} diff --git a/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/attributes/AttributesComputationSpec.scala b/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/attributes/AttributesComputationSpec.scala deleted file mode 100644 index c92780ecee..0000000000 --- a/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/attributes/AttributesComputationSpec.scala +++ /dev/null @@ -1,49 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage.attributes - -import akka.actor.ActorSystem -import akka.http.scaladsl.model.ContentTypes.`text/plain(UTF-8)` -import akka.testkit.TestKit -import ch.epfl.bluebrain.nexus.delta.kernel.http.MediaTypeDetectorConfig -import ch.epfl.bluebrain.nexus.storage.File.{Digest, FileAttributes} -import ch.epfl.bluebrain.nexus.storage.StorageError.InternalError -import ch.epfl.bluebrain.nexus.testkit.scalatest.ce.CatsEffectSpec - -import java.nio.charset.StandardCharsets -import java.nio.file.{Files, Paths} -import scala.concurrent.ExecutionContextExecutor - -class AttributesComputationSpec extends TestKit(ActorSystem("AttributesComputationSpec")) with CatsEffectSpec { - - implicit private val ec: ExecutionContextExecutor =
system.dispatcher - implicit val contentTypeDetector: ContentTypeDetector = new ContentTypeDetector(MediaTypeDetectorConfig.Empty) - - private trait Ctx { - val path = Files.createTempFile("storage-test", ".txt") - val (text, digest) = "something" -> "3fc9b689459d738f8c88a3a48aa9e33542016b7a4052e001aaa536fca74813cb" - } - - "Attributes computation" should { - val computation = AttributesComputation.akkaAttributes - val alg = "SHA-256" - - "succeed" in new Ctx { - Files.write(path, text.getBytes(StandardCharsets.UTF_8)) - computation(path, alg).accepted shouldEqual FileAttributes( - s"file://$path", - Files.size(path), - Digest(alg, digest), - `text/plain(UTF-8)` - ) - Files.deleteIfExists(path) - } - - "fail when algorithm is wrong" in new Ctx { - Files.write(path, text.getBytes(StandardCharsets.UTF_8)) - computation(path, "wrong-alg").rejectedWith[InternalError] - } - - "fail when file does not exist" in new Ctx { - computation(Paths.get("/tmp/non/existing"), alg).rejectedWith[InternalError] - } - } -} diff --git a/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/attributes/ContentTypeDetectorSuite.scala b/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/attributes/ContentTypeDetectorSuite.scala deleted file mode 100644 index c9a9815413..0000000000 --- a/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/attributes/ContentTypeDetectorSuite.scala +++ /dev/null @@ -1,39 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage.attributes - -import akka.http.scaladsl.model.HttpCharsets.`UTF-8` -import akka.http.scaladsl.model.{ContentType, ContentTypes, MediaTypes} -import ch.epfl.bluebrain.nexus.delta.kernel.http.MediaTypeDetectorConfig -import munit.FunSuite - -import java.nio.file.Paths - -class ContentTypeDetectorSuite extends FunSuite { - - private val jsonPath = Paths.get("content-type/file-example.json") - private val noExtensionPath = Paths.get("content-type/no-extension") - - test("Detect 'application/json' as content type") { - val detector = new ContentTypeDetector(MediaTypeDetectorConfig.Empty) - val expected = ContentTypes.`application/json` - assertEquals(detector(jsonPath, isDir = false), expected) - } - - test("Detect overridden content type") { - val customMediaType = MediaTypes.`application/vnd.api+json` - val detector = new ContentTypeDetector(MediaTypeDetectorConfig("json" -> MediaTypes.`application/vnd.api+json`)) - val expected = ContentType(customMediaType, () => `UTF-8`) - assertEquals(detector(jsonPath, isDir = false), expected) - } - - test("Detect `application/octet-stream` as a default value") { - val detector = new ContentTypeDetector(MediaTypeDetectorConfig("json" -> MediaTypes.`application/vnd.api+json`)) - val expected = ContentTypes.`application/octet-stream` - assertEquals(detector(noExtensionPath, isDir = false), expected) - } - - test("Detect `application/x-tar` when the directory flag is set") { - val detector = new ContentTypeDetector(MediaTypeDetectorConfig("json" -> MediaTypes.`application/vnd.api+json`)) - val expected = ContentType(MediaTypes.`application/x-tar`, () => `UTF-8`) - assertEquals(detector(noExtensionPath, isDir = true), expected) - } -} diff --git a/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/auth/AuthorizationMethodSuite.scala b/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/auth/AuthorizationMethodSuite.scala deleted file mode 100644 index a2021ba0d7..0000000000 --- a/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/auth/AuthorizationMethodSuite.scala +++ /dev/null @@ -1,137 +0,0 @@ -package
ch.epfl.bluebrain.nexus.storage.auth - -import cats.data.NonEmptySet -import ch.epfl.bluebrain.nexus.storage.auth.AuthorizationMethod._ -import ch.epfl.bluebrain.nexus.storage.utils.Randomness -import com.nimbusds.jose.jwk.gen.RSAKeyGenerator -import com.nimbusds.jose.jwk.{JWK, JWKSet, RSAKey} -import munit.FunSuite -import pureconfig.ConfigSource - -import scala.jdk.CollectionConverters._ - -class AuthorizationMethodSuite extends FunSuite with Randomness { - - private def generateKey: RSAKey = new RSAKeyGenerator(2048).keyID(randomString()).generate() - - private def parseConfig(value: String) = - ConfigSource.string(value).at("authorization").load[AuthorizationMethod] - - test("Parse successfully for the anonymous method") { - val config = parseConfig( - """ - |authorization { - | type = anonymous - |} - |""".stripMargin - ) - assertEquals(config, Right(Anonymous)) - } - - test("Parse successfully for the verify token method") { - val key1: JWK = generateKey.toPublicJWK - val key2: JWK = generateKey.toPublicJWK - - val config = parseConfig( - s""" - |authorization { - | type = verify-token - | issuer = bbp - | subject = admin - | audiences = [dev, staging] - | keys = [ ${key1.toJSONString}, ${key2.toJSONString}] - |} - |""".stripMargin - ) - - val expectedAudiences = Some(NonEmptySet.of("dev", "staging")) - val expectedKeySet = new JWKSet(List(key1, key2).asJava) - val expected = VerifyToken("bbp", "admin", expectedAudiences, expectedKeySet) - - assertEquals(config, Right(expected)) - } - - test("Parse successfully without audiences") { - val key1: JWK = generateKey.toPublicJWK - - val config = parseConfig( - s""" - |authorization { - | type = verify-token - | issuer = bbp - | subject = admin - | keys = [ ${key1.toJSONString} ] - |} - |""".stripMargin - ) - - val expectedAudiences = None - val expectedKeySet = new JWKSet(key1) - val expected = VerifyToken("bbp", "admin", expectedAudiences, expectedKeySet) - - assertEquals(config, Right(expected)) - } - - test("Fail to parse the config if the issuer is missing") { - val key1: JWK = generateKey.toPublicJWK - - val config = parseConfig( - s""" - |authorization { - | type = verify-token - | subject = admin - | keys = [ ${key1.toJSONString} ] - |} - |""".stripMargin - ) - - assert(config.isLeft, "Parsing must fail with a missing issuer") - } - - test("Fail to parse the config if the subject is missing") { - val key1: JWK = generateKey.toPublicJWK - - val config = parseConfig( - s""" - |authorization { - | type = verify-token - | issuer = bbp - | keys = [ ${key1.toJSONString} ] - |} - |""".stripMargin - ) - - assert(config.isLeft, "Parsing must fail with a missing subject") - } - - test("Fail to parse the config if the key is invalid") { - val config = parseConfig( - s""" - |authorization { - | type = verify-token - | issuer = bbp - | subject = admin - | keys = [ "xxx" ] - |} - |""".stripMargin - ) - - assert(config.isLeft, "Parsing must fail with an invalid key") - } - - test("Fail to parse the config without a key") { - val config = parseConfig( - s""" - |authorization { - | type = verify-token - | issuer = bbp - | subject = admin - | keys = [ ] - |} - |""".stripMargin - ) - - assert(config.isLeft, "Parsing must fail without a key") - } - -} diff --git a/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/routes/AppInfoRoutesSpec.scala b/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/routes/AppInfoRoutesSpec.scala deleted file mode 100644 index 335a744724..0000000000 ---
a/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/routes/AppInfoRoutesSpec.scala +++ /dev/null @@ -1,33 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage.routes - -import akka.http.scaladsl.model.StatusCodes._ -import akka.http.scaladsl.server.Route -import akka.http.scaladsl.testkit.ScalatestRouteTest -import ch.epfl.bluebrain.nexus.storage.auth.AuthorizationMethod -import ch.epfl.bluebrain.nexus.storage.config.{AppConfig, Settings} -import ch.epfl.bluebrain.nexus.storage.routes.instances._ -import ch.epfl.bluebrain.nexus.storage.utils.Resources -import ch.epfl.bluebrain.nexus.storage.{AkkaSource, Storages} -import ch.epfl.bluebrain.nexus.testkit.scalatest.ce.CatsEffectSpec -import io.circe.Json -import org.mockito.IdiomaticMockito - -import java.util.regex.Pattern.quote - -class AppInfoRoutesSpec extends CatsEffectSpec with ScalatestRouteTest with IdiomaticMockito with Resources { - - "the app info routes" should { - - implicit val config: AppConfig = Settings(system).appConfig - implicit val authorizationMethod: AuthorizationMethod = AuthorizationMethod.Anonymous - val route: Route = Routes(mock[Storages[AkkaSource]]) - - "return application information" in { - Get("/") ~> route ~> check { - status shouldEqual OK - responseAs[Json] shouldEqual - jsonContentOf("app-info.json", Map(quote("{version}") -> config.description.version)) - } - } - } -} diff --git a/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/routes/AuthDirectivesSpec.scala b/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/routes/AuthDirectivesSpec.scala deleted file mode 100644 index 99f7fb2942..0000000000 --- a/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/routes/AuthDirectivesSpec.scala +++ /dev/null @@ -1,107 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage.routes - -import akka.http.scaladsl.model.StatusCodes -import akka.http.scaladsl.model.headers.OAuth2BearerToken -import akka.http.scaladsl.server.Directives._ -import akka.http.scaladsl.testkit.ScalatestRouteTest -import ch.epfl.bluebrain.nexus.storage.auth.AuthorizationMethod -import ch.epfl.bluebrain.nexus.storage.auth.AuthorizationMethod.VerifyToken -import ch.epfl.bluebrain.nexus.storage.config.AppConfig.HttpConfig -import ch.epfl.bluebrain.nexus.storage.config.Settings -import ch.epfl.bluebrain.nexus.storage.routes.AuthDirectives._ -import ch.epfl.bluebrain.nexus.testkit.jwt.TokenGenerator -import ch.epfl.bluebrain.nexus.testkit.scalatest.ce.CatsEffectSpec -import com.nimbusds.jose.jwk.gen.RSAKeyGenerator -import com.nimbusds.jose.jwk.{JWKSet, RSAKey} -import org.scalatest.BeforeAndAfter - -import java.time.Instant - -//noinspection NameBooleanParameters -class AuthDirectivesSpec extends CatsEffectSpec with BeforeAndAfter with ScalatestRouteTest { - - implicit private val hc: HttpConfig = Settings(system).appConfig.http - - def validateRoute(implicit authorizationMethod: AuthorizationMethod) = Routes.wrap(validUser.apply { - complete("") - }) - - "Validating with the anonymous method" should { - - implicit val anonymousMethod: AuthorizationMethod = AuthorizationMethod.Anonymous - "validate any token" in { - val expected = "token" - Get("/").addCredentials(OAuth2BearerToken(expected)) ~> validateRoute ~> check { - status shouldEqual StatusCodes.OK - } - } - "validate if no token is provided" in { - Get("/") ~> validateRoute ~> check { - status shouldEqual StatusCodes.OK - } - } - } - - "Validating with the verify token method" should { - - def generateKey: RSAKey = new RSAKeyGenerator(2048).keyID(genString()).generate() - - val rsaKey 
= generateKey - val validIssuer = "bbp" - val validSubject = "admin" - - def generateToken(subject: String, issuer: String, rsaKey: RSAKey) = - TokenGenerator - .generateToken( - subject, - issuer, - rsaKey, - Instant.now().plusSeconds(100L), - Instant.now().minusSeconds(100L), - None, - None, - false, - Some(subject) - ) - .value - - implicit val verifyTokenMethod: AuthorizationMethod = - VerifyToken(validIssuer, validSubject, None, new JWKSet(rsaKey.toPublicJWK)) - - "Succeed with a valid token" in { - val token = generateToken(validSubject, validIssuer, rsaKey) - Get("/").addCredentials(OAuth2BearerToken(token)) ~> validateRoute ~> check { - status shouldEqual StatusCodes.OK - } - } - - "Fail with an invalid issuer" in { - val token = generateToken(validSubject, "xxx", rsaKey) - Get("/").addCredentials(OAuth2BearerToken(token)) ~> validateRoute ~> check { - status shouldEqual StatusCodes.Unauthorized - } - } - - "Fail with an invalid subject" in { - val token = generateToken("bob", validIssuer, rsaKey) - Get("/").addCredentials(OAuth2BearerToken(token)) ~> validateRoute ~> check { - status shouldEqual StatusCodes.Unauthorized - } - } - - "Fail with a token signed with another key" in { - val anotherKey: RSAKey = generateKey - val token = generateToken(validSubject, validIssuer, anotherKey) - Get("/").addCredentials(OAuth2BearerToken(token)) ~> validateRoute ~> check { - status shouldEqual StatusCodes.Unauthorized - } - } - - "Fail with an invalid token" in { - val token = "token" - Get("/").addCredentials(OAuth2BearerToken(token)) ~> validateRoute ~> check { - status shouldEqual StatusCodes.Unauthorized - } - } - } -} diff --git a/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/routes/StorageDirectivesSpec.scala b/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/routes/StorageDirectivesSpec.scala deleted file mode 100644 index 18bf9536b1..0000000000 --- a/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/routes/StorageDirectivesSpec.scala +++ /dev/null @@ -1,83 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage.routes - -import akka.http.scaladsl.model.{StatusCodes, Uri} -import akka.http.scaladsl.server.Directives._ -import akka.http.scaladsl.testkit.ScalatestRouteTest -import ch.epfl.bluebrain.nexus.storage.JsonLdCirceSupport._ -import ch.epfl.bluebrain.nexus.storage.routes.Routes.exceptionHandler -import ch.epfl.bluebrain.nexus.storage.routes.StorageDirectives._ -import ch.epfl.bluebrain.nexus.storage.utils.Resources -import ch.epfl.bluebrain.nexus.testkit.scalatest.ce.CatsEffectSpec -import io.circe.Json - -import java.util.regex.Pattern.quote - -class StorageDirectivesSpec extends CatsEffectSpec with ScalatestRouteTest with Resources { - - "the storage directives" when { - - def pathInvalidJson(path: Uri.Path): Json = - jsonContentOf( - "error.json", - Map( - quote("{type}") -> "PathInvalid", - quote( - "{reason}" - ) -> s"The provided location inside the bucket 'name' with the path '$path' is invalid." 
- ) - ) - - "dealing with file path extraction" should { - val route = handleExceptions(exceptionHandler) { - (extractRelativeFilePath("name") & get) { path => - complete(s"$path") - } - } - - "reject when path contains 2 slashes" in { - Get("///") ~> route ~> check { - status shouldEqual StatusCodes.BadRequest - responseAs[Json] shouldEqual pathInvalidJson(Uri.Path.Empty) - } - } - - "reject when path does not end with a segment" in { - Get("/some/path/") ~> route ~> check { - status shouldEqual StatusCodes.BadRequest - responseAs[Json] shouldEqual pathInvalidJson(Uri.Path("some/path/")) - } - } - - "return path" in { - Get("/some/path/file.txt") ~> route ~> check { - responseAs[String] shouldEqual "some/path/file.txt" - } - } - } - - "dealing with path validation" should { - def route(path: Uri.Path) = - handleExceptions(exceptionHandler) { - (validatePath("name", path) & get) { - complete(s"$path") - } - } - - "reject when some of the segments is . or .." in { - val paths = List(Uri.Path("/./other/file.txt"), Uri.Path("/some/../file.txt")) - forAll(paths) { path => - Get(path.toString()) ~> route(path) ~> check { - status shouldEqual StatusCodes.BadRequest - responseAs[Json] shouldEqual pathInvalidJson(path) - } - } - } - - "pass" in { - Get("/some/path") ~> route(Uri.Path("/some/path")) ~> check { - handled shouldEqual true - } - } - } - } -} diff --git a/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/routes/StorageRoutesSpec.scala b/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/routes/StorageRoutesSpec.scala deleted file mode 100644 index 89cb7a6f66..0000000000 --- a/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/routes/StorageRoutesSpec.scala +++ /dev/null @@ -1,611 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage.routes - -import akka.http.scaladsl.model.ContentTypes._ -import akka.http.scaladsl.model.MediaRanges._ -import akka.http.scaladsl.model.MediaTypes.{`application/octet-stream`, `image/jpeg`} -import akka.http.scaladsl.model.Multipart.FormData -import akka.http.scaladsl.model.Multipart.FormData.BodyPart -import akka.http.scaladsl.model.StatusCodes._ -import akka.http.scaladsl.model.headers.Accept -import akka.http.scaladsl.model.{HttpEntity, Uri} -import akka.http.scaladsl.server.Route -import akka.http.scaladsl.testkit.ScalatestRouteTest -import akka.stream.scaladsl.Source -import akka.util.ByteString -import cats.data.NonEmptyList -import cats.effect.IO -import ch.epfl.bluebrain.nexus.storage.File.{Digest, FileAttributes} -import ch.epfl.bluebrain.nexus.storage.Rejection.PathNotFound -import ch.epfl.bluebrain.nexus.storage.StorageError.InternalError -import ch.epfl.bluebrain.nexus.storage.Storages.BucketExistence.{BucketDoesNotExist, BucketExists} -import ch.epfl.bluebrain.nexus.storage.Storages.PathExistence.{PathDoesNotExist, PathExists} -import ch.epfl.bluebrain.nexus.storage.auth.AuthorizationMethod -import ch.epfl.bluebrain.nexus.storage.config.{AppConfig, Settings} -import ch.epfl.bluebrain.nexus.storage.files.CopyFileOutput -import ch.epfl.bluebrain.nexus.storage.jsonld.JsonLdContext.addContext -import ch.epfl.bluebrain.nexus.storage.routes.instances._ -import ch.epfl.bluebrain.nexus.storage.utils.{Randomness, Resources} -import ch.epfl.bluebrain.nexus.storage.{AkkaSource, Storages} -import ch.epfl.bluebrain.nexus.testkit.scalatest.ce.CatsEffectSpec -import io.circe.Json -import io.circe.syntax.KeyOps -import org.mockito.{ArgumentMatchersSugar, IdiomaticMockito} -import org.scalatest.concurrent.ScalaFutures - -import java.nio.file.Paths 
-import java.util.regex.Pattern.quote -import scala.concurrent.duration._ - -class StorageRoutesSpec - extends CatsEffectSpec - with ScalatestRouteTest - with IdiomaticMockito - with Randomness - with Resources - with ArgumentMatchersSugar - with ScalaFutures { - - implicit override def patienceConfig: PatienceConfig = PatienceConfig(3.second, 15.milliseconds) - - implicit val appConfig: AppConfig = Settings(system).appConfig - implicit val authorizationMethod: AuthorizationMethod = AuthorizationMethod.Anonymous - val storages: Storages[AkkaSource] = mock[Storages[AkkaSource]] - val route: Route = Routes(storages) - - trait Ctx { - val name = randomString() - val resourceCtx = "https://bluebrain.github.io/nexus/contexts/resource.json" - } - - trait RandomFile extends Ctx { - val filename = s"${randomString()}.json" - val content = Json.obj("key" -> Json.fromString(randomString())).noSpaces - val source: AkkaSource = Source.single(ByteString(content)) - implicit val bucketExists: BucketExists = BucketExists - implicit val pathExists: PathExists = PathExists - } - - trait RandomFileCreate extends RandomFile { - val entity: HttpEntity.Strict = HttpEntity(`application/json`, content) - val multipartForm = FormData(BodyPart.Strict("file", entity, Map("filename" -> filename))).toEntity() - val filePathString = s"path/to/file/$filename" - val filePath = Paths.get(filePathString) - val filePathUri = Uri.Path(s"path/to/file/$filename") - } - - "the storage routes" when { - - "accessing the check bucket endpoint" should { - - "fail when bucket check returns a rejection" in new Ctx { - storages.exists(name) shouldReturn BucketDoesNotExist - - Head(s"/v1/buckets/$name") ~> route ~> check { - status shouldEqual NotFound - storages.exists(name) wasCalled once - responseAs[Json] shouldEqual jsonContentOf( - "error.json", - Map( - quote("{type}") -> "BucketNotFound", - quote("{reason}") -> s"The provided bucket '$name' does not exist." - ) - ) - } - } - - "pass" in new Ctx { - storages.exists(name) shouldReturn BucketExists - - Head(s"/v1/buckets/$name") ~> route ~> check { - status shouldEqual OK - storages.exists(name) wasCalled once - } - } - } - - "uploading a file" should { - - "fail when bucket does not exist" in new Ctx { - storages.exists(name) shouldReturn BucketDoesNotExist - - Put(s"/v1/buckets/$name/files/path/to/file/") ~> route ~> check { - status shouldEqual NotFound - responseAs[Json] shouldEqual jsonContentOf( - "error.json", - Map( - quote("{type}") -> "BucketNotFound", - quote("{reason}") -> s"The provided bucket '$name' does not exist." - ) - ) - storages.exists(name) wasCalled once - } - } - - "fail when path already exists" in new RandomFileCreate { - storages.exists(name) shouldReturn BucketExists - storages.pathExists(name, filePathUri) shouldReturn PathExists - - Put(s"/v1/buckets/$name/files/path/to/file/$filename", multipartForm) ~> route ~> check { - status shouldEqual Conflict - responseAs[Json] shouldEqual jsonContentOf( - "error.json", - Map( - quote("{type}") -> "PathAlreadyExists", - quote( - "{reason}" - ) -> s"The provided location inside the bucket '$name' with the path '$filePathUri' already exists."
- ) - ) - storages.exists(name) wasCalled once - storages.pathExists(name, filePathUri) wasCalled once - } - } - - "fail when create file returns an exception" in new RandomFileCreate { - storages.exists(name) shouldReturn BucketExists - storages.pathExists(name, filePathUri) shouldReturn PathDoesNotExist - storages.createFile(eqTo(name), eqTo(filePathUri), any[AkkaSource])( - eqTo(BucketExists), - eqTo(PathDoesNotExist) - ) shouldReturn - IO.raiseError(InternalError("something went wrong")) - - Put(s"/v1/buckets/$name/files/path/to/file/$filename", multipartForm) ~> route ~> check { - status shouldEqual InternalServerError - responseAs[Json] shouldEqual jsonContentOf( - "error.json", - Map( - quote("{type}") -> "InternalError", - quote("{reason}") -> s"The system experienced an unexpected error, please try again later." - ) - ) - storages.createFile(eqTo(name), eqTo(filePathUri), any[AkkaSource])( - eqTo(BucketExists), - eqTo(PathDoesNotExist) - ) wasCalled once - } - } - - "pass" in new RandomFileCreate { - val absoluteFilePath = appConfig.storage.rootVolume.resolve(filePath) - val digest = Digest("SHA-256", randomString()) - val attributes = FileAttributes(s"file://$absoluteFilePath", 12L, digest, `application/octet-stream`) - storages.exists(name) shouldReturn BucketExists - storages.pathExists(name, filePathUri) shouldReturn PathDoesNotExist - storages.createFile(eqTo(name), eqTo(filePathUri), any[AkkaSource])( - eqTo(BucketExists), - eqTo(PathDoesNotExist) - ) shouldReturn IO( - attributes - ) - - Put(s"/v1/buckets/$name/files/path/to/file/$filename", multipartForm) ~> route ~> check { - status shouldEqual Created - responseAs[Json] shouldEqual jsonContentOf( - "file-created.json", - Map( - quote("{location}") -> attributes.location.toString, - quote("{mediaType}") -> attributes.mediaType.value, - quote("{bytes}") -> attributes.bytes.toString, - quote("{algorithm}") -> attributes.digest.algorithm, - quote("{value}") -> attributes.digest.value - ) - ) - storages.createFile(eqTo(name), eqTo(filePathUri), any[AkkaSource])( - eqTo(BucketExists), - eqTo(PathDoesNotExist) - ) wasCalled once - } - } - } - - "linking a file" should { - - "fail when bucket does not exist" in new Ctx { - storages.exists(name) shouldReturn BucketDoesNotExist - - Put(s"/v1/buckets/$name/files/path/to/myfile.txt", jsonContentOf("file-link.json")) ~> route ~> check { - status shouldEqual NotFound - responseAs[Json] shouldEqual jsonContentOf( - "error.json", - Map( - quote("{type}") -> "BucketNotFound", - quote("{reason}") -> s"The provided bucket '$name' does not exist." - ) - ) - storages.exists(name) wasCalled once - } - } - - "fail when move file returns an exception" in new Ctx { - storages.exists(name) shouldReturn BucketExists - val source = "source/dir" - val dest = "dest/dir" - storages.moveFile(name, Uri.Path(source), Uri.Path(dest))(BucketExists) shouldReturn - IO.raiseError(InternalError("something went wrong")) - - val json = jsonContentOf("file-link.json", Map(quote("{source}") -> source)) - - Put(s"/v1/buckets/$name/files/$dest", json) ~> route ~> check { - status shouldEqual InternalServerError - responseAs[Json] shouldEqual jsonContentOf( - "error.json", - Map( - quote("{type}") -> "InternalError", - quote("{reason}") -> s"The system experienced an unexpected error, please try again later."
- ) - ) - storages.moveFile(name, Uri.Path(source), Uri.Path(dest))(BucketExists) wasCalled once - } - } - - "fail with invalid source path" in new Ctx { - storages.exists(name) shouldReturn BucketExists - val source = "../dir" - val dest = "dest/dir" - - val json = jsonContentOf("file-link.json", Map(quote("{source}") -> source)) - - Put(s"/v1/buckets/$name/files/$dest", json) ~> route ~> check { - status shouldEqual BadRequest - responseAs[Json] shouldEqual jsonContentOf( - "error.json", - Map( - quote("{type}") -> "PathInvalid", - quote( - "{reason}" - ) -> s"The provided location inside the bucket '$name' with the path '$source' is invalid." - ) - ) - } - } - - "pass" in new Ctx { - storages.exists(name) shouldReturn BucketExists - val source = "source/dir" - val dest = "dest/dir" - val attributes = FileAttributes(s"file://some/prefix/$dest", 12L, Digest.empty, `application/octet-stream`) - storages.moveFile(name, Uri.Path(source), Uri.Path(dest))(BucketExists) shouldReturn - IO.pure(Right(attributes)) - - val json = jsonContentOf("file-link.json", Map(quote("{source}") -> source)) - - Put(s"/v1/buckets/$name/files/$dest", json) ~> route ~> check { - status shouldEqual OK - responseAs[Json] shouldEqual jsonContentOf( - "file-created.json", - Map( - quote("{location}") -> attributes.location.toString, - quote("{mediaType}") -> attributes.mediaType.value, - quote("{bytes}") -> attributes.bytes.toString, - quote("{algorithm}") -> "", - quote("{value}") -> "" - ) - ) - - storages.moveFile(name, Uri.Path(source), Uri.Path(dest))(BucketExists) wasCalled once - } - } - } - - "copying a file" should { - - "fail when destination bucket does not exist" in new Ctx { - storages.exists(name) shouldReturn BucketDoesNotExist - - val json = - Json.arr(Json.obj("sourceBucket" := name, "source" := "source/dir", "destination" := "/path/to/myfile.txt")) - - Post(s"/v1/buckets/$name/files", json) ~> route ~> check { - status shouldEqual NotFound - responseAs[Json] shouldEqual jsonContentOf( - "/error.json", - Map( - quote("{type}") -> "BucketNotFound", - quote("{reason}") -> s"The provided bucket '$name' does not exist." - ) - ) - storages.exists(name) wasCalled once - } - } - - "fail when a source bucket does not exist" in new Ctx { - val sourceBucket = randomString() - storages.exists(name) shouldReturn BucketExists - storages.exists(sourceBucket) shouldReturn BucketDoesNotExist - - val json = Json.arr( - Json.obj("sourceBucket" := sourceBucket, "source" := "source/dir", "destination" := "/path/to/myfile.txt") - ) - - Post(s"/v1/buckets/$name/files", json) ~> route ~> check { - status shouldEqual NotFound - responseAs[Json] shouldEqual jsonContentOf( - "/error.json", - Map( - quote("{type}") -> "BucketNotFound", - quote("{reason}") -> s"The provided bucket '$sourceBucket' does not exist." 
- ) - ) - storages.exists(name) wasCalled once - storages.exists(sourceBucket) wasCalled once - } - } - - "fail if an empty array is passed" in new Ctx { - storages.exists(name) shouldReturn BucketExists - - Post(s"/v1/buckets/$name/files", Json.arr()) ~> route ~> check { - status shouldEqual BadRequest - storages.exists(name) wasCalled once - } - } - - "fail when copy file returns an exception" in new Ctx { - storages.exists(name) shouldReturn BucketExists - val source = "source/dir" - val dest = "dest/dir" - storages.pathExists(name, Uri.Path(dest)) shouldReturn PathDoesNotExist - val input = NonEmptyList.of(CopyFile(name, Uri.Path(source), Uri.Path(dest))) - storages.copyFiles(name, input)(BucketExists, PathDoesNotExist) shouldReturn - IO.raiseError(InternalError("something went wrong")) - - val json = Json.arr(Json.obj("sourceBucket" := name, "source" := source, "destination" := dest)) - - Post(s"/v1/buckets/$name/files", json) ~> route ~> check { - status shouldEqual InternalServerError - responseAs[Json] shouldEqual jsonContentOf( - "/error.json", - Map( - quote("{type}") -> "InternalError", - quote("{reason}") -> s"The system experienced an unexpected error, please try again later." - ) - ) - storages.copyFiles(name, input)(BucketExists, PathDoesNotExist) wasCalled once - } - } - - "fail with invalid source path" in new Ctx { - storages.exists(name) shouldReturn BucketExists - val source = "../dir" - val dest = "dest/dir" - storages.pathExists(name, Uri.Path(dest)) shouldReturn PathDoesNotExist - - val json = Json.arr(Json.obj("sourceBucket" := name, "source" := source, "destination" := dest)) - - Post(s"/v1/buckets/$name/files", json) ~> route ~> check { - status shouldEqual BadRequest - responseAs[Json] shouldEqual jsonContentOf( - "/error.json", - Map( - quote("{type}") -> "PathInvalid", - quote( - "{reason}" - ) -> s"The provided location inside the bucket '$name' with the path '$source' is invalid."
- ) - ) - } - } - - "pass" in new Ctx { - val sourceBucket = randomString() - storages.exists(name) shouldReturn BucketExists - storages.exists(sourceBucket) shouldReturn BucketExists - val source = "source/dir" - val dest = "dest/dir" - val output = - CopyFileOutput(Uri.Path(source), Uri.Path(dest), Paths.get(s"/rootdir/$source"), Paths.get(s"/rootdir/$dest")) - storages.pathExists(name, Uri.Path(dest)) shouldReturn PathDoesNotExist - storages.copyFiles(name, NonEmptyList.of(CopyFile(sourceBucket, Uri.Path(source), Uri.Path(dest))))( - BucketExists, - PathDoesNotExist - ) shouldReturn - IO.pure(Right(NonEmptyList.of(output))) - - val json = Json.arr(Json.obj("sourceBucket" := sourceBucket, "source" := source, "destination" := dest)) - val response = Json.arr( - Json.obj( - "sourcePath" := source, - "destinationPath" := dest, - "absoluteSourceLocation" := s"/rootdir/$source", - "absoluteDestinationLocation" := s"/rootdir/$dest" - ) - ) - - Post(s"/v1/buckets/$name/files", json) ~> route ~> check { - status shouldEqual Created - responseAs[Json] shouldEqual response - - storages.copyFiles(name, NonEmptyList.of(CopyFile(sourceBucket, Uri.Path(source), Uri.Path(dest))))( - BucketExists, - PathDoesNotExist - ) wasCalled once - } - } - - "fail if a destination path exists" in new Ctx { - storages.exists(name) shouldReturn BucketExists - val source = "source/dir" - val dest = "dest/dir" - val existingDest = "dest2/dir" - storages.pathExists(name, Uri.Path(dest)) shouldReturn PathDoesNotExist - storages.pathExists(name, Uri.Path(existingDest)) shouldReturn PathExists - - val json = Json.arr( - Json.obj("sourceBucket" := name, "source" := source, "destination" := dest), - Json.obj("sourceBucket" := name, "source" := source, "destination" := existingDest) - ) - - Post(s"/v1/buckets/$name/files", json) ~> route ~> check { - status shouldEqual Conflict - } - } - - "fail if a source path is invalid" in new Ctx { - storages.exists(name) shouldReturn BucketExists - val source = "source/dir" - val invalidSource = "../dir" - val dest = "dest/dir" - storages.pathExists(name, Uri.Path(dest)) shouldReturn PathDoesNotExist - - val json = Json.arr( - Json.obj("sourceBucket" := name, "source" := source, "destination" := dest), - Json.obj("sourceBucket" := name, "source" := invalidSource, "destination" := dest) - ) - - Post(s"/v1/buckets/$name/files", json) ~> route ~> check { - status shouldEqual BadRequest - } - } - } - - "downloading a file" should { - - "fail when the path does not exist" in new RandomFile { - val filePathUri = Uri.Path(s"$filename") - storages.exists(name) shouldReturn BucketExists - storages.pathExists(name, filePathUri) shouldReturn PathDoesNotExist - - Get(s"/v1/buckets/$name/files/$filename") ~> Accept(`*/*`) ~> route ~> check { - status shouldEqual NotFound - responseAs[Json] shouldEqual jsonContentOf( - "error.json", - Map( - quote("{type}") -> "PathNotFound", - quote( - "{reason}" - ) -> s"The provided location inside the bucket '$name' with the path '$filePathUri' does not exist."
- ) - ) - storages.pathExists(name, filePathUri) wasCalled once - } - } - - "fail when get file returns a rejection" in new RandomFile { - val filePathUri = Uri.Path(s"$filename") - storages.exists(name) shouldReturn BucketExists - storages.pathExists(name, filePathUri) shouldReturn PathExists - storages.getFile(name, filePathUri) shouldReturn Left(PathNotFound(name, filePathUri)) - - Get(s"/v1/buckets/$name/files/$filename") ~> Accept(`*/*`) ~> route ~> check { - status shouldEqual NotFound - responseAs[Json] shouldEqual jsonContentOf( - "error.json", - Map( - quote("{type}") -> "PathNotFound", - quote( - "{reason}" - ) -> s"The provided location inside the bucket '$name' with the path '$filePathUri' does not exist." - ) - ) - storages.getFile(name, filePathUri) wasCalled once - } - } - - "pass on file" in new RandomFile { - val filePathUri = Uri.Path(s"$filename") - storages.exists(name) shouldReturn BucketExists - storages.getFile(name, filePathUri) shouldReturn Right(source -> Option(filename)) - storages.pathExists(name, filePathUri) shouldReturn PathExists - - Get(s"/v1/buckets/$name/files/$filename") ~> Accept(`*/*`) ~> route ~> check { - status shouldEqual OK - contentType.value shouldEqual "application/octet-stream" - responseEntity.dataBytes.runFold("")(_ ++ _.utf8String).futureValue shouldEqual content - storages.getFile(name, filePathUri) wasCalled once - } - } - - "pass on directory" in new RandomFile { - val directory = "some/dir/" - val directoryUri = Uri.Path(s"$directory") - storages.exists(name) shouldReturn BucketExists - storages.getFile(name, directoryUri) shouldReturn Right(source -> None) - storages.pathExists(name, directoryUri) shouldReturn PathExists - - Get(s"/v1/buckets/$name/files/$directory") ~> Accept(`*/*`) ~> route ~> check { - status shouldEqual OK - contentType.value shouldEqual "application/x-tar" - responseEntity.dataBytes.runFold("")(_ ++ _.utf8String).futureValue shouldEqual content - storages.getFile(name, directoryUri) wasCalled once - } - } - } - - "fetching the file attributes" should { - - "fail when the path does not exist" in new RandomFile { - val filePathUri = Uri.Path(s"$filename") - storages.exists(name) shouldReturn BucketExists - storages.pathExists(name, filePathUri) shouldReturn PathDoesNotExist - - Get(s"/v1/buckets/$name/attributes/$filename") ~> Accept(`*/*`) ~> route ~> check { - status shouldEqual NotFound - responseAs[Json] shouldEqual jsonContentOf( - "error.json", - Map( - quote("{type}") -> "PathNotFound", - quote( - "{reason}" - ) -> s"The provided location inside the bucket '$name' with the path '$filePathUri' does not exist."
- ) - ) - storages.pathExists(name, filePathUri) wasCalled once - } - } - - "return attributes" in new RandomFile { - val filePathUri = Uri.Path(s"$filename") - storages.exists(name) shouldReturn BucketExists - val attributes = - FileAttributes(s"file://$filePathUri", genInt().toLong, Digest("SHA-256", randomString()), `image/jpeg`) - storages.getAttributes(name, filePathUri) shouldReturn IO(attributes) - storages.pathExists(name, filePathUri) shouldReturn PathExists - - Get(s"/v1/buckets/$name/attributes/$filename") ~> Accept(`*/*`) ~> route ~> check { - status shouldEqual OK - val digestJson = Json.obj( - "_algorithm" -> Json.fromString(attributes.digest.algorithm), - "_value" -> Json.fromString(attributes.digest.value) - ) - responseAs[Json] shouldEqual addContext( - Json - .obj( - "_bytes" -> Json.fromLong(attributes.bytes), - "_digest" -> digestJson, - "_location" -> Json.fromString(attributes.location.toString()), - "_mediaType" -> Json.fromString(attributes.mediaType.toString) - ), - resourceCtx - ) - storages.getAttributes(name, filePathUri) wasCalled once - } - } - - "return empty attributes" in new RandomFile { - val filePathUri = Uri.Path(s"$filename") - storages.exists(name) shouldReturn BucketExists - storages.getAttributes(name, filePathUri) shouldReturn IO( - FileAttributes(s"file://$filePathUri", 0L, Digest.empty, `application/octet-stream`) - ) - storages.pathExists(name, filePathUri) shouldReturn PathExists - - Get(s"/v1/buckets/$name/attributes/$filename") ~> Accept(`*/*`) ~> route ~> check { - status shouldEqual Accepted - val digestJson = Json.obj("_algorithm" -> Json.fromString(""), "_value" -> Json.fromString("")) - responseAs[Json] shouldEqual addContext( - Json - .obj( - "_bytes" -> Json.fromLong(0L), - "_digest" -> digestJson, - "_location" -> Json.fromString(s"file://${filePathUri.toString().toLowerCase}"), - "_mediaType" -> Json.fromString(`application/octet-stream`.toString()) - ), - resourceCtx - ) - storages.getAttributes(name, filePathUri) wasCalled once - } - } - } - } -} diff --git a/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/utils/Randomness.scala b/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/utils/Randomness.scala deleted file mode 100644 index 9789cc66f3..0000000000 --- a/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/utils/Randomness.scala +++ /dev/null @@ -1,11 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage.utils - -import scala.util.Random - -trait Randomness { - - def randomString(size: Int = 10): String = Random.alphanumeric.take(size).mkString - - final def genInt(max: Int = 100): Int = Random.nextInt(max) - -} diff --git a/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/utils/Resources.scala b/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/utils/Resources.scala deleted file mode 100644 index f989b1cb67..0000000000 --- a/storage/src/test/scala/ch/epfl/bluebrain/nexus/storage/utils/Resources.scala +++ /dev/null @@ -1,68 +0,0 @@ -package ch.epfl.bluebrain.nexus.storage.utils - -import io.circe.Json -import io.circe.parser.parse - -import scala.io.Source - -/** - * Utility trait that facilitates operating on classpath resources. - */ -trait Resources { - - /** - * Loads the content of the argument classpath resource as a string. 
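- * Resolution first tries Class#getResourceAsStream and then falls back to the class loader of this class.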
- * - * @param resourcePath - * the path of a resource available on the classpath - * @return - * the content of the referenced resource as a string - */ - final def contentOf(resourcePath: String): String = { - val fromClass = Option(getClass.getResourceAsStream(resourcePath)) - val fromClassLoader = Option(getClass.getClassLoader.getResourceAsStream(resourcePath)) - val is = (fromClass orElse fromClassLoader).getOrElse( - throw new IllegalArgumentException(s"Unable to load resource '$resourcePath' from classpath.") - ) - Source.fromInputStream(is).mkString - } - - /** - * Loads the content of the argument classpath resource as a string and replaces all the key matches of the - * ''replacements'' with their values. - * - * @param resourcePath - * the path of a resource available on the classpath - * @param replacements - * the map of regex patterns to their replacement values - * @return - * the content of the referenced resource as a string - */ - final def contentOf(resourcePath: String, replacements: Map[String, String]): String = - replacements.foldLeft(contentOf(resourcePath)) { case (value, (regex, replacement)) => - value.replaceAll(regex, replacement) - } - - /** - * Loads the content of the argument classpath resource as a json value. - * - * @param resourcePath - * the path of a resource available on the classpath - * @return - * the content of the referenced resource as a json value - */ - @SuppressWarnings(Array("TryGet")) - final def jsonContentOf(resourcePath: String): Json = - parse(contentOf(resourcePath)).toTry.get - - /** - * Loads the content of the argument classpath resource as a string and replaces all the key matches of the - * ''replacements'' with their values. The resulting string is parsed into a json value. - * - * @param resourcePath - * the path of a resource available on the classpath - * @param replacements - * the map of regex patterns to their replacement values - * @return - * the content of the referenced resource as a json value - */ - @SuppressWarnings(Array("TryGet")) - final def jsonContentOf(resourcePath: String, replacements: Map[String, String]): Json = - parse(contentOf(resourcePath, replacements)).toTry.get -} diff --git a/tests/docker/docker-compose.yml b/tests/docker/docker-compose.yml index 183a9093c9..9436c548de 100644 --- a/tests/docker/docker-compose.yml +++ b/tests/docker/docker-compose.yml @@ -146,7 +146,7 @@ services: storage-service: container_name: "nexus-storage-service" - image: bluebrain/nexus-storage:latest + image: bluebrain/nexus-storage:1.10.0 environment: STORAGE_CONFIG_FILE: "/config/storage.conf" KAMON_ENABLED: "false"