diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index f141dbd..4099e03 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -1,24 +1,18 @@ name: Test and build on: - pull_request: - branches: - - main + push: jobs: test: runs-on: ubuntu-latest steps: - - name: Checkout - uses: actions/checkout@v1 - - name: Setup Scala - uses: olafurpg/setup-scala@v10 + - uses: actions/checkout@v3 + - uses: actions/setup-java@v3 with: - java-version: "adopt@1.11" - - name: Test - run: sbt -v -Dfile.encoding=UTF-8 +test - - name: Publish Test Report - uses: mikepenz/action-junit-report@v2 - with: - report_paths: 'target/test-reports/TEST-*.xml' - - name: Build - run: sbt -v -Dfile.encoding=UTF-8 +universal:packageZipTarball \ No newline at end of file + distribution: 'corretto' + java-version: '17' + cache: 'sbt' + - name: Run tests + run: sbt test + - name: Assembly + run: sbt assembly \ No newline at end of file diff --git a/.github/workflows/publish_with_latest.yml b/.github/workflows/publish_with_latest.yml index 235ab4f..9eb3644 100644 --- a/.github/workflows/publish_with_latest.yml +++ b/.github/workflows/publish_with_latest.yml @@ -10,20 +10,16 @@ jobs: name: Publish Image using latest tag runs-on: ubuntu-latest steps: - - name: Checkout - uses: actions/checkout@v2 - - name: Setup Scala - uses: olafurpg/setup-scala@v10 + - uses: actions/checkout@v3 + - uses: actions/setup-java@v3 with: - java-version: "adopt@1.11" - - name: Test - run: sbt -v -Dfile.encoding=UTF-8 +test - - name: Publish Test Report - uses: mikepenz/action-junit-report@v2 - with: - report_paths: 'target/test-reports/TEST-*.xml' - - name: Build - run: sbt -v -Dfile.encoding=UTF-8 +universal:packageZipTarball + distribution: 'corretto' + java-version: '17' + cache: 'sbt' + - name: Run tests + run: sbt test + - name: Assembly + run: sbt assembly - name: Push the image on the registry uses: Ferlab-Ste-Justine/action-push-image@v2 with: diff --git a/.github/workflows/publish_with_semver_tag.yml b/.github/workflows/publish_with_semver_tag.yml index 9776fe2..b2b2621 100644 --- a/.github/workflows/publish_with_semver_tag.yml +++ b/.github/workflows/publish_with_semver_tag.yml @@ -10,20 +10,15 @@ jobs: name: Publish Image using tags runs-on: ubuntu-latest steps: - - name: Checkout - uses: actions/checkout@v2 - - name: Setup Scala - uses: olafurpg/setup-scala@v10 + - uses: actions/setup-java@v3 with: - java-version: "adopt@1.11" - - name: Test - run: sbt -v -Dfile.encoding=UTF-8 +test - - name: Publish Test Report - uses: mikepenz/action-junit-report@v2 - with: - report_paths: 'target/test-reports/TEST-*.xml' - - name: Build - run: sbt -v -Dfile.encoding=UTF-8 +universal:packageZipTarball + distribution: 'corretto' + java-version: '17' + cache: 'sbt' + - name: Run tests + run: sbt test + - name: Assembly + run: sbt assembly - name: Push the image on the registry uses: Ferlab-Ste-Justine/action-push-image@v2 with: diff --git a/.github/workflows/publish_with_sha.yml b/.github/workflows/publish_with_sha.yml index 3fe4b1f..9ff6f83 100644 --- a/.github/workflows/publish_with_sha.yml +++ b/.github/workflows/publish_with_sha.yml @@ -10,20 +10,15 @@ jobs: name: Publish Image using commit sha and timestamp runs-on: ubuntu-latest steps: - - name: Checkout - uses: actions/checkout@v2 - - name: Setup Scala - uses: olafurpg/setup-scala@v10 + - uses: actions/setup-java@v3 with: - java-version: "adopt@1.11" - - name: Test - run: sbt -v -Dfile.encoding=UTF-8 +test - - name: Publish Test Report - uses: 
mikepenz/action-junit-report@v2 - with: - report_paths: 'target/test-reports/TEST-*.xml' - - name: Build - run: sbt -v -Dfile.encoding=UTF-8 +universal:packageZipTarball + distribution: 'corretto' + java-version: '17' + cache: 'sbt' + - name: Run tests + run: sbt test + - name: Assembly + run: sbt assembly - name: Push the image on the registry uses: Ferlab-Ste-Justine/action-push-image@v2 with: diff --git a/.gitignore b/.gitignore index e497f3f..456ae26 100644 --- a/.gitignore +++ b/.gitignore @@ -1,9 +1,11 @@ -logs +# common IDEs to be ignored +.idea/ +.metals/ +.vscode/ + +# build tool specific entries +.bloop +.bsp target -/.idea -/.g8 -/.idea_modules -/.classpath -/.project -/.settings -/RUNNING_PID +metals.sbt +project/project diff --git a/.scalafmt.conf b/.scalafmt.conf new file mode 100644 index 0000000..afe8607 --- /dev/null +++ b/.scalafmt.conf @@ -0,0 +1,3 @@ +version = 3.7.14 +maxColumn = 140 +runner.dialect = scala3 diff --git a/Dockerfile b/Dockerfile index 3c625e8..569dfa6 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,9 +1,23 @@ -FROM adoptopenjdk/openjdk11:alpine-jre +FROM amazoncorretto:17-alpine as build-jre +WORKDIR /tmp/jre +# required for strip-debug to work +RUN apk add --no-cache binutils +# Build small JRE image +RUN jlink \ + --add-modules ALL-MODULE-PATH \ + --strip-debug \ + --no-man-pages \ + --no-header-files \ + --compress=2 \ + --output slim -RUN apk update && apk add bash ca-certificates openssl - -COPY target/universal/ferload.tgz . - -RUN tar xvf ferload.tgz - -ENTRYPOINT ["/ferload/bin/ferload"] \ No newline at end of file +FROM alpine:latest +WORKDIR /app +ENV JAVA_HOME=/jre +ENV JAVA_OPTS="-XX:+UseG1GC -XX:+UseContainerSupport -XX:MaxRAMPercentage=75.0 -XshowSettings:vm -XX:+PrintCommandLineFlags" +ENV PATH="$PATH:$JAVA_HOME/bin" +RUN apk update && apk add ca-certificates openssl +COPY --from=build-jre /tmp/jre/slim $JAVA_HOME +COPY target/scala-3.3.1/ferload.jar . +EXPOSE 9090 +ENTRYPOINT ["java", "-jar", "ferload.jar"] \ No newline at end of file diff --git a/README.md b/README.md index 827b7ae..a182274 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,99 @@ -# ferload +# Ferload -This service is used to dowload files stored in an object store s3 compliant. It will : -- Verify if user has access to the object (validating jwt token) -- Generate a presigned url for this object + [![Docker Pulls](https://img.shields.io/docker/pulls/ferlabcrsj/ferload)](https://hub.docker.com/r/ferlab/ferload) + [![Docker Image Size (latest SEMVER)](https://img.shields.io/docker/image-size/ferlabcrsj/ferload?sort=semver)](https://hub.docker.com/r/ferlab/ferload) + [![Docker Image Version (latest semver)](https://img.shields.io/docker/v/ferlabcrsj/ferload?sort=semver)](https://hub.docker.com/r/ferlab/ferload) +Ferload is an API that controls access to files stored in any S3-compliant object store and generates a presigned URL when the user is granted access. +Ferload is compliant with the [GA4GH Data Repository Service](https://ga4gh.github.io/data-repository-service-schemas/). For now, only Bearer tokens are supported (not Passports and Visas). + + +## Technologies + +Ferload is developed in Scala 3 and is based on [tapir](https://tapir.softwaremill.com/en/latest/) and [cats-effect](https://typelevel.org/cats-effect/). 
+ +## Quick start + +If you don't have [sbt](https://www.scala-sbt.org) installed already, you can use the provided wrapper script: + +```shell +./sbtx -h # shows usage of the wrapper script +./sbtx compile # build the project +./sbtx test # run the tests +./sbtx run # run the application (Main) +``` + +For more details, check the [sbtx usage](https://github.com/dwijnand/sbt-extras#sbt--h) page. + +Otherwise, if sbt is already installed, you can use the standard commands: + +```shell +sbt compile # build the project +sbt test # run the tests +sbt run # run the application (Main) +``` + +## Build + +```shell +sbt assembly +docker build -t ferload . +``` + +## Environment variables + +Keycloak authentication server information : + +- `AUTH_URL` : Keycloak URL +- `AUTH_REALM` : Keycloak Realm +- `AUTH_CLIENT_ID` : Id of the client that contains resource definitions and permissions +- `AUTH_CLIENT_SECRET` : Secret of the client that contains resource definitions and permissions +- `AUTH_RESOURCES_POLICY_GLOBAL_NAME` : Name of the resource a user must have access to in order to download all files. + Works only with endpoints that fetch files by URL. Can be empty. + +Ferload client: This section is used to configure Ferload clients that can be installed to download files by requesting Ferload endpoints. +- `FERLOAD_CLIENT_METHOD` : Two possible values : `token` or `password`. Default `token`. +- `FERLOAD_CLIENT_CLIENT_ID` : Client id to use to authenticate the user in case of the `password` method. +- `FERLOAD_CLIENT_TOKEN_LINK` : URL to use to fetch a new token in case of the `token` method. +- `FERLOAD_CLIENT_TOKEN_HELPER` : Text to display in the Ferload client to explain how to get a new token. Used only if `FERLOAD_CLIENT_METHOD` is `token`. + + AWS S3 information : + +- `AWS_ACCESS_KEY` : Access key of the AWS account +- `AWS_SECRET_KEY` : Secret key of the AWS account +- `AWS_BUCKET` : Default bucket to use if objects are fetched by URL. Can be empty. +- `AWS_ENDPOINT` : Endpoint of the S3 service. Can be empty. +- `AWS_PATH_ACCESS_STYLE` : Path access style for the S3 service (true for minio, false for AWS). Default false. +- `AWS_PRESIGNED_URL_EXPIRATION_IN_SECONDS` : Expiration time of presigned URLs. Default 3600. +- `AWS_REGION` : Region of the AWS account. Can be empty. + +DRS Information : +- `DRS_ID` : DRS Server ID. +- `DRS_NAME` : DRS Name. +- `DRS_ORGANIZATION_NAME` : DRS Organization. +- `DRS_ORGANIZATION_URL` : DRS Organization URL. +- `DRS_SELF_HOST` : DRS self host, used in the `self_uri` property of `DrsObject`. +- `DRS_VERSION` : DRS Version. Default 1.3.0 +- `DRS_CONTACT_URL` : DRS contact URL. Can be empty. +- `DRS_DESCRIPTION` : DRS description. Can be empty. +- `DRS_DOCUMENTATION_URL` : DRS documentation URL. Can be empty. +- `DRS_ENVIRONMENT` : DRS environment. Can be empty. + +HTTP Server information : +- `HTTP_HOST` : Address the HTTP server should listen on. Default 0.0.0.0 (all interfaces) +- `HTTP_PORT` : Port the HTTP server should listen on. Default 9090 + +Log configuration : +- `LOG_LEVEL` : Log level. Default WARN. 
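+For example, once the image from the Build section is available locally, Ferload can be started with `docker run` by passing these variables. This is only a sketch: every value below is a placeholder to replace with your own Keycloak, S3 and DRS settings. The `DRS_*` identifiers are mandatory at startup, and `FERLOAD_CLIENT_TOKEN_LINK` is included because the default client method is `token`.
+
+```shell
+docker run --rm -p 9090:9090 \
+  -e AUTH_URL=https://keycloak.example.com \
+  -e AUTH_REALM=myrealm \
+  -e AUTH_CLIENT_ID=ferload \
+  -e AUTH_CLIENT_SECRET=changeme \
+  -e FERLOAD_CLIENT_TOKEN_LINK=https://portal.example.com/token \
+  -e AWS_ACCESS_KEY=minio \
+  -e AWS_SECRET_KEY=minio123 \
+  -e AWS_ENDPOINT=http://minio:9000 \
+  -e AWS_BUCKET=files \
+  -e AWS_PATH_ACCESS_STYLE=true \
+  -e DRS_ID=ferload \
+  -e DRS_NAME=Ferload \
+  -e DRS_SELF_HOST=https://ferload.example.com \
+  -e DRS_ORGANIZATION_NAME="Example Organization" \
+  -e DRS_ORGANIZATION_URL=https://example.com \
+  ferload
+```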
+ +## Endpoints : + +- Swagger : /docs +- Status : /status +- Prometheus : /metrics + +## Links: + +* [tapir documentation](https://tapir.softwaremill.com/en/latest/) +* [tapir github](https://github.com/softwaremill/tapir) +* [sbtx wrapper](https://github.com/dwijnand/sbt-extras#installation) diff --git a/app/services/keycloak/PermsService.scala b/app/services/keycloak/PermsService.scala index 4c32816..20492ba 100644 --- a/app/services/keycloak/PermsService.scala +++ b/app/services/keycloak/PermsService.scala @@ -2,11 +2,12 @@ package services.keycloak import auth.UserRequest import org.keycloak.authorization.client.util.HttpResponseException -import org.keycloak.representations.idm.authorization.UmaPermissionRepresentation +import org.keycloak.representations.idm.authorization.{ResourceRepresentation, ScopeRepresentation, UmaPermissionRepresentation} import play.api.Configuration import play.api.mvc.AnyContent import java.util +import java.util.{Collections, OptionalInt} import javax.inject.{Inject, Singleton} import scala.jdk.CollectionConverters._ @@ -41,6 +42,24 @@ class PermsService @Inject()(config: Configuration, permsClient: PermsClient) { } } + def createResources(token: String, resources: Set[CreateResource]) = { + val protection = permsClient.authzClient.protection(token) // insure token has resource creation rights + val resource = protection.resource() + resources.map { + r => + val scopes: java.util.Set[ScopeRepresentation] = r.scopes.map(s => new ScopeRepresentation(s)).asJava + val rr = new ResourceRepresentation(r.name.getOrElse(r.id), scopes, r.uri.orNull, r.`type`.orNull) + rr.setId(r.id) + val attributes = new util.HashMap[String, util.List[String]]() + attributes.put("location", Collections.singletonList(r.location)) + attributes.put("checksum", r.checksums.toList.asJava) + rr.setAttributes(attributes) + rr + }.map { r => + resource.create(r) + } + } + def createPermissions(token: String, userName: String, files: Set[String]): (Set[String], Set[String]) = { val protection = permsClient.authzClient.protection(token) // insure token has resource creation rights val resources = protection.resource() @@ -75,3 +94,5 @@ class PermsService @Inject()(config: Configuration, permsClient: PermsClient) { (createdPerms, files.diff(createdPerms)) } } + +case class CreateResource(id: String, name: Option[String], `type`: Option[String], uri: Option[String], scopes: Set[String], checksums: Set[String], location: String, size: Option[Long]) diff --git a/build.sbt b/build.sbt index 64359c7..3d4e510 100644 --- a/build.sbt +++ b/build.sbt @@ -1,28 +1,45 @@ -name := """ferload""" -organization := "bio.ferlab" -version := "1.0-SNAPSHOT" -lazy val root = (project in file(".")).enablePlugins(PlayScala) -val keycloakVersion = "17.0.1" +resolvers += "Sonatype Releases" at "https://s01.oss.sonatype.org/content/repositories/releases/" +val tapirVersion = "1.7.3" +val http4sVersion = "0.23.23" +val sstpVersion = "3.9.0" +scalacOptions ++= Seq("-Xmax-inlines", "100") +javacOptions ++= Seq("-source", "17", "-target", "17") +lazy val rootProject = (project in file(".")).settings( + Seq( + name := "ferload", + version := "0.1.0-SNAPSHOT", + organization := "bio.ferlab", + scalaVersion := "3.3.1", + assembly / assemblyJarName := "ferload.jar", + assembly / assemblyMergeStrategy:= { + case PathList("META-INF", "mailcap") => MergeStrategy.first + case PathList("META-INF", "io.netty.versions.properties") => MergeStrategy.first + case PathList("META-INF", "maven", "org.webjars", "swagger-ui", 
"pom.properties") => MergeStrategy.first + case PathList("module-info.class") => MergeStrategy.discard + case x => MergeStrategy.defaultMergeStrategy(x) + }, + libraryDependencies ++= Seq( + "com.softwaremill.sttp.tapir" %% "tapir-http4s-server" % tapirVersion, + "org.http4s" %% "http4s-ember-server" % http4sVersion, + "org.http4s" %% "http4s-ember-client" % http4sVersion, + "org.http4s" %% "http4s-circe" % http4sVersion, + "com.softwaremill.sttp.tapir" %% "tapir-prometheus-metrics" % tapirVersion, + "com.softwaremill.sttp.tapir" %% "tapir-swagger-ui-bundle" % tapirVersion, + "com.softwaremill.sttp.tapir" %% "tapir-json-circe" % tapirVersion, + "ch.qos.logback" % "logback-classic" % "1.4.11", + "com.softwaremill.sttp.client3" %% "http4s-backend" % sstpVersion, + "com.softwaremill.sttp.client3" %% "circe" % sstpVersion, + "software.amazon.awssdk" % "s3" % "2.20.150", + "com.github.ben-manes.caffeine" % "caffeine" % "3.1.8", + "com.softwaremill.sttp.tapir" %% "tapir-sttp-stub-server" % tapirVersion % Test, + "com.softwaremill.sttp.tapir" %% "tapir-sttp-client" % "1.7.5" % Test, + "com.softwaremill.sttp.tapir" %% "tapir-http4s-client" % "1.7.5" % Test, + "org.scalatest" %% "scalatest" % "3.2.17" % Test, + ) -scalaVersion := "2.13.9" + ) +) -libraryDependencies += guice -libraryDependencies += caffeine -libraryDependencies += ws -libraryDependencies += "software.amazon.awssdk" % "s3" % "2.17.295" -//libraryDependencies += "com.amazonaws" % "aws-java-sdk-s3" % "1.11.880" -libraryDependencies += "org.keycloak" % "keycloak-core" % keycloakVersion -libraryDependencies += "org.keycloak" % "keycloak-authz-client" % keycloakVersion -//libraryDependencies += "org.keycloak" % "keycloak-adapter-core" % keycloakVersion -libraryDependencies += "org.mockito" % "mockito-core" % "3.8.0" % Test -libraryDependencies += "org.scalatestplus.play" %% "scalatestplus-play" % "5.0.0" % Test -libraryDependencies += "com.fasterxml.jackson.module" %% "jackson-module-scala" % "2.12.3" -packageName in Universal := name.value -// Adds additional packages into Twirl -//TwirlKeys.templateImports += "bio.ferlab.controllers._" - -// Adds additional packages into conf/routes -// play.sbt.routes.RoutesKeys.routesImport += "bio.ferlab.binders._" diff --git a/project/build.properties b/project/build.properties index 0837f7a..3040987 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.3.13 +sbt.version=1.9.4 diff --git a/project/plugins.sbt b/project/plugins.sbt index c972307..bcc1878 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -1,2 +1,3 @@ -addSbtPlugin("com.typesafe.play" % "sbt-plugin" % "2.8.7") -addSbtPlugin("org.foundweekends.giter8" % "sbt-giter8-scaffold" % "0.11.0") +addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.6") +addSbtPlugin("ch.epfl.scala" % "sbt-bloop" % "1.5.11") +addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "2.1.3") \ No newline at end of file diff --git a/sbtx b/sbtx new file mode 100755 index 0000000..10c3d2c --- /dev/null +++ b/sbtx @@ -0,0 +1,664 @@ +#!/usr/bin/env bash +# +# A more capable sbt runner, coincidentally also called sbt. +# Author: Paul Phillips +# https://github.com/paulp/sbt-extras +# +# Generated from http://www.opensource.org/licenses/bsd-license.php +# Copyright (c) 2011, Paul Phillips. All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the author nor the names of its contributors +# may be used to endorse or promote products derived from this software +# without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +# TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +set -o pipefail + +declare -r sbt_release_version="1.6.2" +declare -r sbt_unreleased_version="1.7.0-M2" + +declare -r latest_213="2.13.8" +declare -r latest_212="2.12.16" +declare -r latest_211="2.11.12" +declare -r latest_210="2.10.7" +declare -r latest_29="2.9.3" +declare -r latest_28="2.8.2" + +declare -r buildProps="project/build.properties" + +declare -r sbt_launch_ivy_release_repo="https://repo.typesafe.com/typesafe/ivy-releases" +declare -r sbt_launch_ivy_snapshot_repo="https://repo.scala-sbt.org/scalasbt/ivy-snapshots" +declare -r sbt_launch_mvn_release_repo="https://repo1.maven.org/maven2" +declare -r sbt_launch_mvn_snapshot_repo="https://repo.scala-sbt.org/scalasbt/maven-snapshots" + +declare -r default_jvm_opts_common="-Xms512m -Xss2m -XX:MaxInlineLevel=18" +declare -r noshare_opts="-Dsbt.global.base=project/.sbtboot -Dsbt.boot.directory=project/.boot -Dsbt.ivy.home=project/.ivy -Dsbt.coursier.home=project/.coursier" + +declare sbt_jar sbt_dir sbt_create sbt_version sbt_script sbt_new +declare sbt_explicit_version +declare verbose noshare batch trace_level + +declare java_cmd="java" +declare sbt_launch_dir="$HOME/.sbt/launchers" +declare sbt_launch_repo + +# pull -J and -D options to give to java. +declare -a java_args scalac_args sbt_commands residual_args + +# args to jvm/sbt via files or environment variables +declare -a extra_jvm_opts extra_sbt_opts + +echoerr() { echo >&2 "$@"; } +vlog() { [[ -n "$verbose" ]] && echoerr "$@"; } +die() { + echo "Aborting: $*" + exit 1 +} + +setTrapExit() { + # save stty and trap exit, to ensure echo is re-enabled if we are interrupted. + SBT_STTY="$(stty -g 2>/dev/null)" + export SBT_STTY + + # restore stty settings (echo in particular) + onSbtRunnerExit() { + [ -t 0 ] || return + vlog "" + vlog "restoring stty: $SBT_STTY" + stty "$SBT_STTY" + } + + vlog "saving stty: $SBT_STTY" + trap onSbtRunnerExit EXIT +} + +# this seems to cover the bases on OSX, and someone will +# have to tell me about the others. 
+get_script_path() { + local path="$1" + [[ -L "$path" ]] || { + echo "$path" + return + } + + local -r target="$(readlink "$path")" + if [[ "${target:0:1}" == "/" ]]; then + echo "$target" + else + echo "${path%/*}/$target" + fi +} + +script_path="$(get_script_path "${BASH_SOURCE[0]}")" +declare -r script_path +script_name="${script_path##*/}" +declare -r script_name + +init_default_option_file() { + local overriding_var="${!1}" + local default_file="$2" + if [[ ! -r "$default_file" && "$overriding_var" =~ ^@(.*)$ ]]; then + local envvar_file="${BASH_REMATCH[1]}" + if [[ -r "$envvar_file" ]]; then + default_file="$envvar_file" + fi + fi + echo "$default_file" +} + +sbt_opts_file="$(init_default_option_file SBT_OPTS .sbtopts)" +sbtx_opts_file="$(init_default_option_file SBTX_OPTS .sbtxopts)" +jvm_opts_file="$(init_default_option_file JVM_OPTS .jvmopts)" + +build_props_sbt() { + [[ -r "$buildProps" ]] && + grep '^sbt\.version' "$buildProps" | tr '=\r' ' ' | awk '{ print $2; }' +} + +set_sbt_version() { + sbt_version="${sbt_explicit_version:-$(build_props_sbt)}" + [[ -n "$sbt_version" ]] || sbt_version=$sbt_release_version + export sbt_version +} + +url_base() { + local version="$1" + + case "$version" in + 0.7.*) echo "https://storage.googleapis.com/google-code-archive-downloads/v2/code.google.com/simple-build-tool" ;; + 0.10.*) echo "$sbt_launch_ivy_release_repo" ;; + 0.11.[12]) echo "$sbt_launch_ivy_release_repo" ;; + 0.*-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9][0-9][0-9][0-9]) # ie "*-yyyymmdd-hhMMss" + echo "$sbt_launch_ivy_snapshot_repo" ;; + 0.*) echo "$sbt_launch_ivy_release_repo" ;; + *-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]T[0-9][0-9][0-9][0-9][0-9][0-9]) # ie "*-yyyymmddThhMMss" + echo "$sbt_launch_mvn_snapshot_repo" ;; + *) echo "$sbt_launch_mvn_release_repo" ;; + esac +} + +make_url() { + local version="$1" + + local base="${sbt_launch_repo:-$(url_base "$version")}" + + case "$version" in + 0.7.*) echo "$base/sbt-launch-0.7.7.jar" ;; + 0.10.*) echo "$base/org.scala-tools.sbt/sbt-launch/$version/sbt-launch.jar" ;; + 0.11.[12]) echo "$base/org.scala-tools.sbt/sbt-launch/$version/sbt-launch.jar" ;; + 0.*) echo "$base/org.scala-sbt/sbt-launch/$version/sbt-launch.jar" ;; + *) echo "$base/org/scala-sbt/sbt-launch/$version/sbt-launch-${version}.jar" ;; + esac +} + +addJava() { + vlog "[addJava] arg = '$1'" + java_args+=("$1") +} +addSbt() { + vlog "[addSbt] arg = '$1'" + sbt_commands+=("$1") +} +addScalac() { + vlog "[addScalac] arg = '$1'" + scalac_args+=("$1") +} +addResidual() { + vlog "[residual] arg = '$1'" + residual_args+=("$1") +} + +addResolver() { addSbt "set resolvers += $1"; } + +addDebugger() { addJava "-Xdebug" && addJava "-Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=$1"; } + +setThisBuild() { + vlog "[addBuild] args = '$*'" + local key="$1" && shift + addSbt "set $key in ThisBuild := $*" +} +setScalaVersion() { + [[ "$1" == *"-SNAPSHOT" ]] && addResolver 'Resolver.sonatypeRepo("snapshots")' + addSbt "++ $1" +} +setJavaHome() { + java_cmd="$1/bin/java" + setThisBuild javaHome "_root_.scala.Some(file(\"$1\"))" + export JAVA_HOME="$1" + export JDK_HOME="$1" + export PATH="$JAVA_HOME/bin:$PATH" +} + +getJavaVersion() { + local -r str=$("$1" -version 2>&1 | grep -E -e '(java|openjdk) version' | awk '{ print $3 }' | tr -d '"') + + # java -version on java8 says 1.8.x + # but on 9 and 10 it's 9.x.y and 10.x.y. 
+ if [[ "$str" =~ ^1\.([0-9]+)(\..*)?$ ]]; then + echo "${BASH_REMATCH[1]}" + # Fixes https://github.com/dwijnand/sbt-extras/issues/326 + elif [[ "$str" =~ ^([0-9]+)(\..*)?(-ea)?$ ]]; then + echo "${BASH_REMATCH[1]}" + elif [[ -n "$str" ]]; then + echoerr "Can't parse java version from: $str" + fi +} + +checkJava() { + # Warn if there is a Java version mismatch between PATH and JAVA_HOME/JDK_HOME + + [[ -n "$JAVA_HOME" && -e "$JAVA_HOME/bin/java" ]] && java="$JAVA_HOME/bin/java" + [[ -n "$JDK_HOME" && -e "$JDK_HOME/lib/tools.jar" ]] && java="$JDK_HOME/bin/java" + + if [[ -n "$java" ]]; then + pathJavaVersion=$(getJavaVersion java) + homeJavaVersion=$(getJavaVersion "$java") + if [[ "$pathJavaVersion" != "$homeJavaVersion" ]]; then + echoerr "Warning: Java version mismatch between PATH and JAVA_HOME/JDK_HOME, sbt will use the one in PATH" + echoerr " Either: fix your PATH, remove JAVA_HOME/JDK_HOME or use -java-home" + echoerr " java version from PATH: $pathJavaVersion" + echoerr " java version from JAVA_HOME/JDK_HOME: $homeJavaVersion" + fi + fi +} + +java_version() { + local -r version=$(getJavaVersion "$java_cmd") + vlog "Detected Java version: $version" + echo "$version" +} + +is_apple_silicon() { [[ "$(uname -s)" == "Darwin" && "$(uname -m)" == "arm64" ]]; } + +# MaxPermSize critical on pre-8 JVMs but incurs noisy warning on 8+ +default_jvm_opts() { + local -r v="$(java_version)" + if [[ $v -ge 17 ]]; then + echo "$default_jvm_opts_common" + elif [[ $v -ge 10 ]]; then + if is_apple_silicon; then + # As of Dec 2020, JVM for Apple Silicon (M1) doesn't support JVMCI + echo "$default_jvm_opts_common" + else + echo "$default_jvm_opts_common -XX:+UnlockExperimentalVMOptions -XX:+UseJVMCICompiler" + fi + elif [[ $v -ge 8 ]]; then + echo "$default_jvm_opts_common" + else + echo "-XX:MaxPermSize=384m $default_jvm_opts_common" + fi +} + +execRunner() { + # print the arguments one to a line, quoting any containing spaces + vlog "# Executing command line:" && { + for arg; do + if [[ -n "$arg" ]]; then + if printf "%s\n" "$arg" | grep -q ' '; then + printf >&2 "\"%s\"\n" "$arg" + else + printf >&2 "%s\n" "$arg" + fi + fi + done + vlog "" + } + + setTrapExit + + if [[ -n "$batch" ]]; then + "$@" /dev/null 2>&1; then + curl --fail --silent --location "$url" --output "$jar" + elif command -v wget >/dev/null 2>&1; then + wget -q -O "$jar" "$url" + fi + } && [[ -r "$jar" ]] +} + +acquire_sbt_jar() { + { + sbt_jar="$(jar_file "$sbt_version")" + [[ -r "$sbt_jar" ]] + } || { + sbt_jar="$HOME/.ivy2/local/org.scala-sbt/sbt-launch/$sbt_version/jars/sbt-launch.jar" + [[ -r "$sbt_jar" ]] + } || { + sbt_jar="$(jar_file "$sbt_version")" + jar_url="$(make_url "$sbt_version")" + + echoerr "Downloading sbt launcher for ${sbt_version}:" + echoerr " From ${jar_url}" + echoerr " To ${sbt_jar}" + + download_url "${jar_url}" "${sbt_jar}" + + case "${sbt_version}" in + 0.*) + vlog "SBT versions < 1.0 do not have published MD5 checksums, skipping check" + echo "" + ;; + *) verify_sbt_jar "${sbt_jar}" ;; + esac + } +} + +verify_sbt_jar() { + local jar="${1}" + local md5="${jar}.md5" + md5url="$(make_url "${sbt_version}").md5" + + echoerr "Downloading sbt launcher ${sbt_version} md5 hash:" + echoerr " From ${md5url}" + echoerr " To ${md5}" + + download_url "${md5url}" "${md5}" >/dev/null 2>&1 + + if command -v md5sum >/dev/null 2>&1; then + if echo "$(cat "${md5}") ${jar}" | md5sum -c -; then + rm -rf "${md5}" + return 0 + else + echoerr "Checksum does not match" + return 1 + fi + elif command -v md5 >/dev/null 2>&1; then + 
if [ "$(md5 -q "${jar}")" == "$(cat "${md5}")" ]; then + rm -rf "${md5}" + return 0 + else + echoerr "Checksum does not match" + return 1 + fi + elif command -v openssl >/dev/null 2>&1; then + if [ "$(openssl md5 -r "${jar}" | awk '{print $1}')" == "$(cat "${md5}")" ]; then + rm -rf "${md5}" + return 0 + else + echoerr "Checksum does not match" + return 1 + fi + else + echoerr "Could not find an MD5 command" + return 1 + fi +} + +usage() { + set_sbt_version + cat < display stack traces with a max of frames (default: -1, traces suppressed) + -debug-inc enable debugging log for the incremental compiler + -no-colors disable ANSI color codes + -sbt-create start sbt even if current directory contains no sbt project + -sbt-dir path to global settings/plugins directory (default: ~/.sbt/) + -sbt-boot path to shared boot directory (default: ~/.sbt/boot in 0.11+) + -ivy path to local Ivy repository (default: ~/.ivy2) + -no-share use all local caches; no sharing + -offline put sbt in offline mode + -jvm-debug Turn on JVM debugging, open at the given port. + -batch Disable interactive mode + -prompt Set the sbt prompt; in expr, 's' is the State and 'e' is Extracted + -script Run the specified file as a scala script + + # sbt version (default: sbt.version from $buildProps if present, otherwise $sbt_release_version) + -sbt-version use the specified version of sbt (default: $sbt_release_version) + -sbt-force-latest force the use of the latest release of sbt: $sbt_release_version + -sbt-dev use the latest pre-release version of sbt: $sbt_unreleased_version + -sbt-jar use the specified jar as the sbt launcher + -sbt-launch-dir directory to hold sbt launchers (default: $sbt_launch_dir) + -sbt-launch-repo repo url for downloading sbt launcher jar (default: $(url_base "$sbt_version")) + + # scala version (default: as chosen by sbt) + -28 use $latest_28 + -29 use $latest_29 + -210 use $latest_210 + -211 use $latest_211 + -212 use $latest_212 + -213 use $latest_213 + -scala-home use the scala build at the specified directory + -scala-version use the specified version of scala + -binary-version use the specified scala version when searching for dependencies + + # java version (default: java from PATH, currently $(java -version 2>&1 | grep version)) + -java-home alternate JAVA_HOME + + # passing options to the jvm - note it does NOT use JAVA_OPTS due to pollution + # The default set is used if JVM_OPTS is unset and no -jvm-opts file is found + $(default_jvm_opts) + JVM_OPTS environment variable holding either the jvm args directly, or + the reference to a file containing jvm args if given path is prepended by '@' (e.g. '@/etc/jvmopts') + Note: "@"-file is overridden by local '.jvmopts' or '-jvm-opts' argument. + -jvm-opts file containing jvm args (if not given, .jvmopts in project root is used if present) + -Dkey=val pass -Dkey=val directly to the jvm + -J-X pass option -X directly to the jvm (-J is stripped) + + # passing options to sbt, OR to this runner + SBT_OPTS environment variable holding either the sbt args directly, or + the reference to a file containing sbt args if given path is prepended by '@' (e.g. '@/etc/sbtopts') + Note: "@"-file is overridden by local '.sbtopts' or '-sbt-opts' argument. 
+ -sbt-opts file containing sbt args (if not given, .sbtopts in project root is used if present) + -S-X add -X to sbt's scalacOptions (-S is stripped) + + # passing options exclusively to this runner + SBTX_OPTS environment variable holding either the sbt-extras args directly, or + the reference to a file containing sbt-extras args if given path is prepended by '@' (e.g. '@/etc/sbtxopts') + Note: "@"-file is overridden by local '.sbtxopts' or '-sbtx-opts' argument. + -sbtx-opts file containing sbt-extras args (if not given, .sbtxopts in project root is used if present) +EOM + exit 0 +} + +process_args() { + require_arg() { + local type="$1" + local opt="$2" + local arg="$3" + + if [[ -z "$arg" ]] || [[ "${arg:0:1}" == "-" ]]; then + die "$opt requires <$type> argument" + fi + } + while [[ $# -gt 0 ]]; do + case "$1" in + -h | -help) usage ;; + -v) verbose=true && shift ;; + -d) addSbt "--debug" && shift ;; + -w) addSbt "--warn" && shift ;; + -q) addSbt "--error" && shift ;; + -x) shift ;; # currently unused + -trace) require_arg integer "$1" "$2" && trace_level="$2" && shift 2 ;; + -debug-inc) addJava "-Dxsbt.inc.debug=true" && shift ;; + + -no-colors) addJava "-Dsbt.log.noformat=true" && addJava "-Dsbt.color=false" && shift ;; + -sbt-create) sbt_create=true && shift ;; + -sbt-dir) require_arg path "$1" "$2" && sbt_dir="$2" && shift 2 ;; + -sbt-boot) require_arg path "$1" "$2" && addJava "-Dsbt.boot.directory=$2" && shift 2 ;; + -ivy) require_arg path "$1" "$2" && addJava "-Dsbt.ivy.home=$2" && shift 2 ;; + -no-share) noshare=true && shift ;; + -offline) addSbt "set offline in Global := true" && shift ;; + -jvm-debug) require_arg port "$1" "$2" && addDebugger "$2" && shift 2 ;; + -batch) batch=true && shift ;; + -prompt) require_arg "expr" "$1" "$2" && setThisBuild shellPrompt "(s => { val e = Project.extract(s) ; $2 })" && shift 2 ;; + -script) require_arg file "$1" "$2" && sbt_script="$2" && addJava "-Dsbt.main.class=sbt.ScriptMain" && shift 2 ;; + + -sbt-version) require_arg version "$1" "$2" && sbt_explicit_version="$2" && shift 2 ;; + -sbt-force-latest) sbt_explicit_version="$sbt_release_version" && shift ;; + -sbt-dev) sbt_explicit_version="$sbt_unreleased_version" && shift ;; + -sbt-jar) require_arg path "$1" "$2" && sbt_jar="$2" && shift 2 ;; + -sbt-launch-dir) require_arg path "$1" "$2" && sbt_launch_dir="$2" && shift 2 ;; + -sbt-launch-repo) require_arg path "$1" "$2" && sbt_launch_repo="$2" && shift 2 ;; + + -28) setScalaVersion "$latest_28" && shift ;; + -29) setScalaVersion "$latest_29" && shift ;; + -210) setScalaVersion "$latest_210" && shift ;; + -211) setScalaVersion "$latest_211" && shift ;; + -212) setScalaVersion "$latest_212" && shift ;; + -213) setScalaVersion "$latest_213" && shift ;; + + -scala-version) require_arg version "$1" "$2" && setScalaVersion "$2" && shift 2 ;; + -binary-version) require_arg version "$1" "$2" && setThisBuild scalaBinaryVersion "\"$2\"" && shift 2 ;; + -scala-home) require_arg path "$1" "$2" && setThisBuild scalaHome "_root_.scala.Some(file(\"$2\"))" && shift 2 ;; + -java-home) require_arg path "$1" "$2" && setJavaHome "$2" && shift 2 ;; + -sbt-opts) require_arg path "$1" "$2" && sbt_opts_file="$2" && shift 2 ;; + -sbtx-opts) require_arg path "$1" "$2" && sbtx_opts_file="$2" && shift 2 ;; + -jvm-opts) require_arg path "$1" "$2" && jvm_opts_file="$2" && shift 2 ;; + + -D*) addJava "$1" && shift ;; + -J*) addJava "${1:2}" && shift ;; + -S*) addScalac "${1:2}" && shift ;; + + new) sbt_new=true && : ${sbt_explicit_version:=$sbt_release_version} 
&& addResidual "$1" && shift ;; + + *) addResidual "$1" && shift ;; + esac + done +} + +# process the direct command line arguments +process_args "$@" + +# skip #-styled comments and blank lines +readConfigFile() { + local end=false + until $end; do + read -r || end=true + [[ $REPLY =~ ^# ]] || [[ -z $REPLY ]] || echo "$REPLY" + done <"$1" +} + +# if there are file/environment sbt_opts, process again so we +# can supply args to this runner +if [[ -r "$sbt_opts_file" ]]; then + vlog "Using sbt options defined in file $sbt_opts_file" + while read -r opt; do extra_sbt_opts+=("$opt"); done < <(readConfigFile "$sbt_opts_file") +elif [[ -n "$SBT_OPTS" && ! ("$SBT_OPTS" =~ ^@.*) ]]; then + vlog "Using sbt options defined in variable \$SBT_OPTS" + IFS=" " read -r -a extra_sbt_opts <<<"$SBT_OPTS" +else + vlog "No extra sbt options have been defined" +fi + +# if there are file/environment sbtx_opts, process again so we +# can supply args to this runner +if [[ -r "$sbtx_opts_file" ]]; then + vlog "Using sbt options defined in file $sbtx_opts_file" + while read -r opt; do extra_sbt_opts+=("$opt"); done < <(readConfigFile "$sbtx_opts_file") +elif [[ -n "$SBTX_OPTS" && ! ("$SBTX_OPTS" =~ ^@.*) ]]; then + vlog "Using sbt options defined in variable \$SBTX_OPTS" + IFS=" " read -r -a extra_sbt_opts <<<"$SBTX_OPTS" +else + vlog "No extra sbt options have been defined" +fi + +[[ -n "${extra_sbt_opts[*]}" ]] && process_args "${extra_sbt_opts[@]}" + +# reset "$@" to the residual args +set -- "${residual_args[@]}" +argumentCount=$# + +# set sbt version +set_sbt_version + +checkJava + +# only exists in 0.12+ +setTraceLevel() { + case "$sbt_version" in + "0.7."* | "0.10."* | "0.11."*) echoerr "Cannot set trace level in sbt version $sbt_version" ;; + *) setThisBuild traceLevel "$trace_level" ;; + esac +} + +# set scalacOptions if we were given any -S opts +[[ ${#scalac_args[@]} -eq 0 ]] || addSbt "set scalacOptions in ThisBuild += \"${scalac_args[*]}\"" + +[[ -n "$sbt_explicit_version" && -z "$sbt_new" ]] && addJava "-Dsbt.version=$sbt_explicit_version" +vlog "Detected sbt version $sbt_version" + +if [[ -n "$sbt_script" ]]; then + residual_args=("$sbt_script" "${residual_args[@]}") +else + # no args - alert them there's stuff in here + ((argumentCount > 0)) || { + vlog "Starting $script_name: invoke with -help for other options" + residual_args=(shell) + } +fi + +# verify this is an sbt dir, -create was given or user attempts to run a scala script +[[ -r ./build.sbt || -d ./project || -n "$sbt_create" || -n "$sbt_script" || -n "$sbt_new" ]] || { + cat < + + + + + %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n + + + + + + diff --git a/src/main/scala/bio/ferlab/ferload/Config.scala b/src/main/scala/bio/ferlab/ferload/Config.scala new file mode 100644 index 0000000..0f02de8 --- /dev/null +++ b/src/main/scala/bio/ferlab/ferload/Config.scala @@ -0,0 +1,122 @@ +package bio.ferlab.ferload + +case class Config(auth: AuthConfig, http: HttpConfig, s3Config: S3Config, drsConfig: DrsConfig, ferloadClientConfig: FerloadClientConfig) + +case class S3Config( + accessKey: Option[String], + secretKey: Option[String], + endpoint: Option[String], + defaultBucket: Option[String], + pathAccessStyle: Boolean, + region: Option[String], + expirationPresignedUrlInSeconds: Int + ) + +object S3Config { + def load(): S3Config = { + S3Config( + sys.env.get("AWS_ACCESS_KEY"), + sys.env.get("AWS_SECRET_KEY"), + sys.env.get("AWS_ENDPOINT"), + sys.env.get("AWS_BUCKET"), + sys.env.get("AWS_PATH_ACCESS_STYLE").exists(_.toBoolean), + 
sys.env.get("AWS_REGION"), + sys.env.get("AWS_PRESIGNED_URL_EXPIRATION_IN_SECONDS").map(_.toInt).getOrElse(3600) + ) + } +} + +case class DrsConfig( + id: String, + name: String, + version: String, + selfHost: String, + organizationName: String, + organizationUrl: String, + description: Option[String] = None, + contactUrl: Option[String] = None, + documentationUrl: Option[String] = None, + environment: Option[String] = None, + ) + +object DrsConfig { + def load(): DrsConfig = { + DrsConfig( + sys.env("DRS_ID"), + sys.env("DRS_NAME"), + sys.env.getOrElse("DRS_VERSION", "1.3.0"), + sys.env("DRS_SELF_HOST"), + sys.env("DRS_ORGANIZATION_NAME"), + sys.env("DRS_ORGANIZATION_URL"), + sys.env.get("DRS_DESCRIPTION"), + sys.env.get("DRS_CONTACT_URL"), + sys.env.get("DRS_DOCUMENTATION_URL"), + sys.env.get("DRS_ENVIRONMENT"), + + ) + } +} + +case class HttpConfig(host: String, port: Int) + +object HttpConfig { + private val DEFAULT_PORT = 9090 + private val DEFAULT_HOST = "0.0.0.0" + + def load(): HttpConfig = { + val port = sys.env + .get("HTTP_PORT") + .map(_.toInt) + .getOrElse(DEFAULT_PORT) + val host = sys.env + .getOrElse("HTTP_HOST", DEFAULT_HOST) + HttpConfig(host, port) + } +} + +case class AuthConfig(authUrl: String, realm: String, clientId: String, clientSecret: String, resourcesGlobalName: Option[String]) { + val baseUri = s"$authUrl/realms/$realm" +} + +case class FerloadClientConfig(method: String, clientId: Option[String], tokenLink: Option[String], tokenHelper: Option[String]) + +object FerloadClientConfig { + val TOKEN: String = "token" + val PASSWORD: String = "password" + def load(): FerloadClientConfig = { + val f = FerloadClientConfig( + sys.env.getOrElse("FERLOAD_CLIENT_METHOD", "token"), + sys.env.get("FERLOAD_CLIENT_CLIENT_ID"), + sys.env.get("FERLOAD_CLIENT_TOKEN_LINK"), + sys.env.get("FERLOAD_CLIENT_TOKEN_HELPER") + ) + if (f.method != TOKEN && f.method != PASSWORD) { + throw new IllegalArgumentException(s"FERLOAD_CLIENT_METHOD must be $TOKEN or $PASSWORD") + } + if (f.method == TOKEN && f.tokenLink.isEmpty) { + throw new IllegalArgumentException(s"FERLOAD_CLIENT_TOKEN_LINK must be set when FERLOAD_CLIENT_METHOD is $TOKEN") + } + f + } +} + +object Config { + def load(): Config = { + + Config( + AuthConfig( + sys.env("AUTH_URL"), + sys.env("AUTH_REALM"), + sys.env("AUTH_CLIENT_ID"), + sys.env("AUTH_CLIENT_SECRET"), + sys.env.get("AUTH_RESOURCES_POLICY_GLOBAL_NAME") + ), + HttpConfig.load(), + S3Config.load(), + DrsConfig.load(), + FerloadClientConfig.load() + ) + + } +} + diff --git a/src/main/scala/bio/ferlab/ferload/Main.scala b/src/main/scala/bio/ferlab/ferload/Main.scala new file mode 100644 index 0000000..2ff88eb --- /dev/null +++ b/src/main/scala/bio/ferlab/ferload/Main.scala @@ -0,0 +1,68 @@ +package bio.ferlab.ferload + +import bio.ferlab.ferload.endpoints.Endpoints +import bio.ferlab.ferload.services.{AuthorizationService, ResourceService, S3Service} +import cats.effect.{ExitCode, IO, IOApp} +import ch.qos.logback.classic.Level +import com.comcast.ip4s.{Host, Port} +import org.http4s.client.Client +import org.http4s.client.middleware.Logger +import org.http4s.ember.client.EmberClientBuilder +import org.http4s.ember.server.EmberServerBuilder +import org.http4s.server.Router +import sttp.client3.http4s.Http4sBackend +import sttp.tapir.server.http4s.{Http4sServerInterpreter, Http4sServerOptions} +import org.http4s.server.middleware.* +import org.slf4j.LoggerFactory + +object Main extends IOApp: + + val config: Config = Config.load() + private val serverOptions: 
Http4sServerOptions[IO] = + Http4sServerOptions + .customiseInterceptors[IO] + .metricsInterceptor(Endpoints.prometheusMetrics.metricsInterceptor()) + .options + + override def run(args: List[String]): IO[ExitCode] = { + setLogLevel() + for { + client: Client[IO] <- EmberClientBuilder + .default[IO] + .build + finalClient = setHttpClientLogger(client) + backend = Http4sBackend.usingClient(client) + authorizationService = new AuthorizationService(config.auth, backend) + resourceService = new ResourceService(config.auth, backend) + s3Service = new S3Service(config.s3Config) + routes = Http4sServerInterpreter[IO](serverOptions).toRoutes(Endpoints.all(config, authorizationService, resourceService, s3Service)) + withCors = CORS.policy.withAllowOriginAll(routes) + _ <- EmberServerBuilder + .default[IO] + .withHost(Host.fromString(config.http.host).get) + .withPort(Port.fromInt(config.http.port).get) + .withHttpApp(Router("/" -> withCors).orNotFound) + .build + + + } yield () + }.useForever + +def setLogLevel(): Unit = { + sys.env.get("LOG_LEVEL") + .foreach { envLogLeve => + import ch.qos.logback.classic.Logger + val rootLogger = LoggerFactory.getLogger(org.slf4j.Logger.ROOT_LOGGER_NAME).asInstanceOf[Logger] + rootLogger.setLevel(Level.toLevel(envLogLeve, Level.WARN)) + } + +} + +def setHttpClientLogger(client: Client[IO]): Client[IO] = { + sys.env.get("LOG_LEVEL") match + case Some("DEBUG") => Logger(logHeaders = true, logBody = true)(client) + case _ => client +} + + + diff --git a/src/main/scala/bio/ferlab/ferload/endpoints/ConfigEndpoint.scala b/src/main/scala/bio/ferlab/ferload/endpoints/ConfigEndpoint.scala new file mode 100644 index 0000000..c6d6145 --- /dev/null +++ b/src/main/scala/bio/ferlab/ferload/endpoints/ConfigEndpoint.scala @@ -0,0 +1,32 @@ +package bio.ferlab.ferload.endpoints + +import bio.ferlab.ferload.{Config, FerloadClientConfig} +import bio.ferlab.ferload.model.{FerloadConfig, KeycloakConfig, TokenConfig} +import cats.effect.IO +import io.circe.generic.auto.* +import sttp.tapir.* +import sttp.tapir.Schema.annotations.encodedName +import sttp.tapir.generic.auto.* +import sttp.tapir.json.circe.* +import sttp.tapir.server.ServerEndpoint + +object ConfigEndpoint: + + + private val configEndpoint: PublicEndpoint[Unit, Unit, FerloadConfig, Any] = endpoint.get + .in("config") + .out(jsonBody[FerloadConfig]) + + def configServerEndpoint(config: Config): ServerEndpoint[Any, IO] = configEndpoint.serverLogicSuccess(_ => { + if (config.ferloadClientConfig.method == FerloadClientConfig.TOKEN) { + val tokenConfig = TokenConfig(config.ferloadClientConfig.tokenLink.get, config.ferloadClientConfig.tokenHelper) + IO.pure(FerloadConfig(config.ferloadClientConfig.method, None, Some(tokenConfig))) + } else if (config.ferloadClientConfig.method == FerloadClientConfig.PASSWORD) { + val kc = KeycloakConfig(config.auth.authUrl, config.auth.realm, config.ferloadClientConfig.clientId.get, config.auth.clientId) + IO.pure(FerloadConfig(config.ferloadClientConfig.method, Some(kc), None)) + } + else { + IO.raiseError(new IllegalStateException(s"Invalid configuration type ${config.ferloadClientConfig.method}")) + } + + }) diff --git a/src/main/scala/bio/ferlab/ferload/endpoints/DrsEndpoints.scala b/src/main/scala/bio/ferlab/ferload/endpoints/DrsEndpoints.scala new file mode 100644 index 0000000..0e30e64 --- /dev/null +++ b/src/main/scala/bio/ferlab/ferload/endpoints/DrsEndpoints.scala @@ -0,0 +1,107 @@ +package bio.ferlab.ferload.endpoints + +import bio.ferlab.ferload.model.ErrorResponse +import 
bio.ferlab.ferload.model.drs.* +import bio.ferlab.ferload.model.drs.CreateDrsObject.toResource +import bio.ferlab.ferload.services.{AuthorizationService, ResourceService, S3Service} +import bio.ferlab.ferload.{Config, DrsConfig} +import cats.effect.IO +import cats.implicits.* +import io.circe.generic.auto.* +import sttp.client3.HttpError +import sttp.model.StatusCode +import sttp.tapir.* +import sttp.tapir.generic.auto.* +import sttp.tapir.json.circe.* +import sttp.tapir.server.ServerEndpoint + +object DrsEndpoints: + val baseEndpoint: Endpoint[Unit, Unit, Unit, Unit, Any] = endpoint + .prependSecurityIn("ga4gh") + .prependSecurityIn("drs") + .prependSecurityIn("v1") + + private val service = baseEndpoint.get + .in("service-info") + .out(jsonBody[Service]) + + private val objectEnpoint: Endpoint[Unit, Unit, Unit, Unit, Any] = baseEndpoint + .securityIn("objects") + + private def serviceServer(drsConfig: DrsConfig) = service.serverLogicSuccess(_ => + Service( + id = drsConfig.id, + name = drsConfig.name, + organization = ServiceOrganization(drsConfig.organizationName, drsConfig.organizationUrl), + `type` = ServiceType("org.ga4gh", "drs", "1.3.0"), + version = drsConfig.version, + description = drsConfig.description, + contactUrl = drsConfig.contactUrl, + documentationUrl = drsConfig.documentationUrl, + createdAt = None, + updatedAt = None, + environment = drsConfig.environment + ).pure[IO] + ) + + private def objectInfo: Endpoint[Unit, String, (StatusCode, ErrorResponse), Authorizations, Any] = + objectEnpoint.in(path[String].name("object_id")) + .options + .errorOut(statusCode.and(jsonBody[ErrorResponse])) + .out(jsonBody[Authorizations]) + + private def getObject(authorizationService: AuthorizationService) = + objectEnpoint + .securityIn(auth.bearer[String]()) + .securityIn(path[String].name("object_id")) + .errorOut(statusCode.and(jsonBody[ErrorResponse])) + .serverSecurityLogic((token, objectId) => authorizationService.authLogic(token, Seq(objectId))) + .get + .out(jsonBody[DrsObject]) + + private val createObject: Endpoint[Unit, (String, CreateDrsObject), (StatusCode, ErrorResponse), StatusCode, Any] = + baseEndpoint + .in("object") + .in(auth.bearer[String]()) + .in(jsonBody[CreateDrsObject]) + .errorOut(statusCode.and(jsonBody[ErrorResponse])) + .out(statusCode) + .post + + private def objectInfoServer(config: Config, resourceService: ResourceService) = objectInfo.serverLogic { objectId => + for { + existResource <- resourceService.existResource(objectId) + } yield existResource match { + case StatusCode.Ok => Right(Authorizations(Some(List("BearerAuth")), None, Some(List(s"${config.auth.authUrl}/realms/${config.auth.realm}")))) + case StatusCode.NotFound => Left((StatusCode.NotFound, ErrorResponse(s"Object $objectId not found", 404))) + case e => throw new IllegalStateException(s"Unexpected status code: ${e.code}") + } + } + + private def getObjectServer(config: Config, authorizationService: AuthorizationService, resourceService: ResourceService, s3Service: S3Service) = getObject(authorizationService).serverLogicSuccess { user => + _ => + for { + resource <- resourceService.getResourceById(user.permissions.head.resource_id) + bucketAndPath <- IO.fromTry(S3Service.parseS3Urls(resource.uris)) + (bucket, path) = bucketAndPath + url = s3Service.presignedUrl(bucket, path) + } yield DrsObject.build(resource, url, config.drsConfig.selfHost) + } + + private def createObjectServer(config: Config, resourceService: ResourceService) = createObject.serverLogicSuccess { (token, createDrsObject) 
=> + val existResources = resourceService.existResource(createDrsObject.id) + existResources.flatMap { + case StatusCode.Ok => resourceService.updateResource(token, toResource(createDrsObject)) + case StatusCode.NotFound => resourceService.createResource(token, toResource(createDrsObject)) + case e => IO.raiseError(new IllegalStateException(s"Unexpected status code: $e")) + } + + + } + + def all(config: Config, authorizationService: AuthorizationService, resourceService: ResourceService, s3Service: S3Service): Seq[ServerEndpoint[Any, IO]] = Seq( + serviceServer(config.drsConfig), + objectInfoServer(config, resourceService), + getObjectServer(config, authorizationService, resourceService, s3Service), + createObjectServer(config, resourceService) + ) \ No newline at end of file diff --git a/src/main/scala/bio/ferlab/ferload/endpoints/Endpoints.scala b/src/main/scala/bio/ferlab/ferload/endpoints/Endpoints.scala new file mode 100644 index 0000000..e5ff0da --- /dev/null +++ b/src/main/scala/bio/ferlab/ferload/endpoints/Endpoints.scala @@ -0,0 +1,43 @@ +package bio.ferlab.ferload.endpoints + +import bio.ferlab.ferload.Config +import bio.ferlab.ferload.endpoints.ConfigEndpoint.configServerEndpoint +import bio.ferlab.ferload.endpoints.LegacyObjectEndpoints.{objectByPathServer, listObjectsByPathServer} +import bio.ferlab.ferload.endpoints.ObjectsEndpoints.ById.singleObjectServer +import bio.ferlab.ferload.services.{AuthorizationService, ResourceService, S3Service} +import cats.effect.IO +import io.circe.generic.auto.* +import sttp.tapir.* +import sttp.tapir.server.ServerEndpoint +import sttp.tapir.server.metrics.prometheus.PrometheusMetrics +import sttp.tapir.swagger.bundle.SwaggerInterpreter + +object Endpoints: + private case class User(name: String) extends AnyVal + + val statusEndpoint: PublicEndpoint[Unit, Unit, String, Any] = endpoint.get + .in("status") + .out(stringBody) + + val statusServerEndpoint: ServerEndpoint[Any, IO] = statusEndpoint.serverLogicSuccess(_ => IO.pure("OK!")) + + private def apiEndpoints(config: Config, authorizationService: AuthorizationService, resourceService: ResourceService, s3Service: S3Service): List[ServerEndpoint[Any, IO]] = List( + statusServerEndpoint, + configServerEndpoint(config), + ) ++ ObjectsEndpoints.all(config, authorizationService, resourceService, s3Service) + ++ DrsEndpoints.all(config, authorizationService, resourceService, s3Service) + ++ LegacyObjectEndpoints.all(config, authorizationService, s3Service) + + private def docEndpoints(apiEndpoints: List[ServerEndpoint[_, IO]]): List[ServerEndpoint[Any, IO]] = SwaggerInterpreter() + .fromServerEndpoints[IO](apiEndpoints, "ferload", "1.0.0") + + val prometheusMetrics: PrometheusMetrics[IO] = PrometheusMetrics.default[IO]() + private val metricsEndpoint: ServerEndpoint[Any, IO] = prometheusMetrics.metricsEndpoint + + def all(config: Config, authorizationService: AuthorizationService, resourceService: ResourceService, s3Service: S3Service): List[ServerEndpoint[Any, IO]] = { + val api = apiEndpoints(config, authorizationService, resourceService, s3Service) + + docEndpoints(api) ++ api ++ List(metricsEndpoint) + } + + diff --git a/src/main/scala/bio/ferlab/ferload/endpoints/LegacyObjectEndpoints.scala b/src/main/scala/bio/ferlab/ferload/endpoints/LegacyObjectEndpoints.scala new file mode 100644 index 0000000..01babf7 --- /dev/null +++ b/src/main/scala/bio/ferlab/ferload/endpoints/LegacyObjectEndpoints.scala @@ -0,0 +1,70 @@ +package bio.ferlab.ferload.endpoints + +import bio.ferlab.ferload.Config +import 
bio.ferlab.ferload.model.{ErrorResponse, ObjectUrl, User} +import bio.ferlab.ferload.services.{AuthorizationService, S3Service} +import cats.effect.IO +import cats.implicits.* +import io.circe.generic.auto.* +import sttp.model.StatusCode +import sttp.tapir.* +import sttp.tapir.generic.auto.* +import sttp.tapir.json.circe.* +import sttp.tapir.server.* + +object LegacyObjectEndpoints: + + + private def securedGlobalEndpoint(authorizationService: AuthorizationService, resourceGlobalName: String): PartialServerEndpoint[String, User, Unit, (StatusCode, ErrorResponse), Unit, Any, IO] = + endpoint + .securityIn(auth.bearer[String]()) + .errorOut(statusCode.and(jsonBody[ErrorResponse])) + .serverSecurityLogic(token => authorizationService.authLogic(token, Seq(resourceGlobalName))) + + private def objectByPath(authorizationService: AuthorizationService, resourceGlobalName: String): PartialServerEndpoint[String, User, List[String], (StatusCode, ErrorResponse), ObjectUrl, Any, IO] = + securedGlobalEndpoint(authorizationService, resourceGlobalName) + .get + .description("Retrieve an object by its path and return an url to download it") + .deprecated() + .in(paths.description("Path of the object to retrieve")) + .out(jsonBody[ObjectUrl]) + + private def objectsByPaths(authorizationService: AuthorizationService, resourceGlobalName: String): PartialServerEndpoint[String, User, String, (StatusCode, ErrorResponse), Map[String, String], Any, IO] = + securedGlobalEndpoint(authorizationService, resourceGlobalName) + .description("Retrieve a list of objects by their paths and return a list of download URLs for each object") + .deprecated() + .post + .in("downloadLinks") + .in(stringBody.description("List of URLs of objects to retrieve").example("file1.vcf\nfile2.vcf")) + .out(jsonBody[Map[String, String]] + .description("List of files URLs by object path") + .example(Map("file1.vcf" -> "https://file1.vcf", "file2.vcf" -> "https://file2.vcf")) + ) + + def objectByPathServer(authorizationService: AuthorizationService, s3Service: S3Service, resourceGlobalName: String, defaultBucket: String): ServerEndpoint[Any, IO] = + objectByPath(authorizationService, resourceGlobalName).serverLogicSuccess { user => + file => s3Service.presignedUrl(defaultBucket, file.mkString("/")).pure[IO].map(ObjectUrl.apply) + } + + def listObjectsByPathServer(authorizationService: AuthorizationService, s3Service: S3Service, resourceGlobalName: String, defaultBucket: String): ServerEndpoint[Any, IO] = + objectsByPaths(authorizationService, resourceGlobalName).serverLogicSuccess { user => + files => + files.split("\n") + .toList + .traverse(file => s3Service.presignedUrl(defaultBucket, file).pure[IO].map(u => file -> u)).map(_.toMap) + } + + def all(config: Config, authorizationService: AuthorizationService, s3Service: S3Service): Seq[ServerEndpoint[Any, IO]] = { + val s: Option[List[ServerEndpoint[Any, IO]]] = for { + b <- config.s3Config.defaultBucket + r <- config.auth.resourcesGlobalName + servers = List( + listObjectsByPathServer(authorizationService, s3Service, r, b), + objectByPathServer(authorizationService, s3Service, r, b) + ) + } yield servers + s.getOrElse(Nil) + } + + + diff --git a/src/main/scala/bio/ferlab/ferload/endpoints/ObjectsEndpoints.scala b/src/main/scala/bio/ferlab/ferload/endpoints/ObjectsEndpoints.scala new file mode 100644 index 0000000..1c53a64 --- /dev/null +++ b/src/main/scala/bio/ferlab/ferload/endpoints/ObjectsEndpoints.scala @@ -0,0 +1,123 @@ +package bio.ferlab.ferload.endpoints + +import 
bio.ferlab.ferload.Config +import bio.ferlab.ferload.endpoints.SecuredEndpoint.baseEndpoint +import bio.ferlab.ferload.model.{ErrorResponse, ObjectUrl, ReadResource, User} +import bio.ferlab.ferload.services.{AuthorizationService, ResourceService, S3Service} +import cats.effect.IO +import cats.implicits.* +import sttp.model.StatusCode +import sttp.tapir.* +import sttp.tapir.generic.auto.* +import sttp.tapir.json.circe.* +import sttp.tapir.server.* +import io.circe.generic.auto.* + +object ObjectsEndpoints: + + + object ById: + + private val byIdEndpoint = baseEndpoint.securityIn("objects") + + private def singleObject(authorizationService: AuthorizationService): PartialServerEndpoint[(String, String), User, Unit, (StatusCode, ErrorResponse), ObjectUrl, Any, IO] = byIdEndpoint + .get + .securityIn(path[String].name("object_id")) + .serverSecurityLogic((token, objectId) => authorizationService.authLogic(token, Seq(objectId))) + .description("Retrieve an object by its id and return an url to download it") + .out(jsonBody[ObjectUrl]) + + private def listObjects(authorizationService: AuthorizationService): PartialServerEndpoint[(String, String), User, Unit, (StatusCode, ErrorResponse), Map[String, String], Any, IO] = byIdEndpoint + .post + .securityIn("list") + .securityIn(stringBody.description("List of ids of objects to retrieve").example("FI1\nFI2")) + .serverSecurityLogic((token, objects) => authorizationService.authLogic(token, objects.split("\n"))) + .description("Retrieve an object by its id and return an url to download it") + .out(jsonBody[Map[String, String]] + .description("List of files URLs by object id") + .example(Map("FI1" -> "https://file1.vcf", "FI2" -> "https://file2.vcf"))) + + + def singleObjectServer(authorizationService: AuthorizationService, resourceService: ResourceService, s3Service: S3Service): ServerEndpoint[Any, IO] = + singleObject(authorizationService).serverLogicSuccess { user => + _ => + for { + resource <- resourceService.getResourceById(user.permissions.head.resource_id) + bucketAndPath <- IO.fromTry(S3Service.parseS3Urls(resource.uris)) + (bucket, path) = bucketAndPath + url = s3Service.presignedUrl(bucket, path) + } yield ObjectUrl(url) + + } + + + def listObjectsServer(authorizationService: AuthorizationService, resourceService: ResourceService, s3Service: S3Service): ServerEndpoint[Any, IO] = + listObjects(authorizationService).serverLogicSuccess { user => + _ => + val resourcesIO: IO[List[ReadResource]] = user.permissions.toList.traverse(p => resourceService.getResourceById(p.resource_id)) + resourcesIO.map { resources => + val urls: Seq[(String, (String, String))] = resources.flatMap(r => S3Service.parseS3Urls(r.uris).toOption.map(r.name -> _)) + val m: Map[String, String] = urls.map { case (name, (bucket, path)) => name -> s3Service.presignedUrl(bucket, path) }.toMap + m + } + + + } + + def all(authorizationService: AuthorizationService, resourceService: ResourceService, s3Service: S3Service): Seq[ServerEndpoint[Any, IO]] = List( + singleObjectServer(authorizationService, resourceService, s3Service), + listObjectsServer(authorizationService, resourceService, s3Service) + ) + + object ByPath: + private def byPathEndpoint(authorizationService: AuthorizationService, resourceGlobalName: String): PartialServerEndpoint[String, User, Unit, (StatusCode, ErrorResponse), Unit, Any, IO] = + baseEndpoint + .securityIn("objects") + .securityIn("bypath") + .serverSecurityLogic(token => authorizationService.authLogic(token, Seq(resourceGlobalName))) + + private def 
singleObject(authorizationService: AuthorizationService, resourceGlobalName: String): PartialServerEndpoint[String, User, String, (StatusCode, ErrorResponse), ObjectUrl, Any, IO] = + byPathEndpoint(authorizationService, resourceGlobalName) + .get + .description("Retrieve an object by its path and return an url to download it") + .in(query[String]("path").description("Path of the object to retrieve").example("dir1/file1.vcf")) + .out(jsonBody[ObjectUrl]) + + def singleObjectServer(authorizationService: AuthorizationService, s3Service: S3Service, resourceGlobalName: String, defaultBucket: String): ServerEndpoint[Any, IO] = + singleObject(authorizationService, resourceGlobalName).serverLogicSuccess { user => + file => s3Service.presignedUrl(defaultBucket, file).pure[IO].map(ObjectUrl.apply) + } + + private def listObjects(authorizationService: AuthorizationService, resourceGlobalName: String): PartialServerEndpoint[String, User, String, (StatusCode, ErrorResponse), Map[String, String], Any, IO] = byPathEndpoint(authorizationService, resourceGlobalName) + .description("Retrieve a list of objects by their path and return a list of download URLs for each object") + .post + .in("list") + .in(stringBody.description("List of URLs of objects to retrieve").example("file1.vcf\nfile2.vcf")) + .out(jsonBody[Map[String, String]] + .description("List of files URLs by file name") + .example(Map("dir1/file1.vcf" -> "https://file1.vcf", "dir1/file2.vcf" -> "https://file2.vcf")) + ) + + def listObjectsServer(authorizationService: AuthorizationService, s3Service: S3Service, resourceGlobalName: String, defaultBucket: String): ServerEndpoint[Any, IO] = listObjects(authorizationService, resourceGlobalName).serverLogicSuccess { user => + files => + files.split("\n") + .toList + .traverse(file => s3Service.presignedUrl(defaultBucket, file).pure[IO].map(u => file -> u)).map(_.toMap) + } + + def all(config: Config, authorizationService: AuthorizationService, s3Service: S3Service): Seq[ServerEndpoint[Any, IO]] = { + val s: Option[List[ServerEndpoint[Any, IO]]] = for { + b <- config.s3Config.defaultBucket + r <- config.auth.resourcesGlobalName + servers = List( + singleObjectServer(authorizationService, s3Service, r, b), + listObjectsServer(authorizationService, s3Service, r, b) + ) + } yield servers + s.getOrElse(Nil) + + } + + def all(config: Config, authorizationService: AuthorizationService, resourceService: ResourceService, s3Service: S3Service): Seq[ServerEndpoint[Any, IO]] = + ByPath.all(config, authorizationService, s3Service) ++ ById.all(authorizationService, resourceService, s3Service) + diff --git a/src/main/scala/bio/ferlab/ferload/endpoints/SecuredEndpoint.scala b/src/main/scala/bio/ferlab/ferload/endpoints/SecuredEndpoint.scala new file mode 100644 index 0000000..2674d31 --- /dev/null +++ b/src/main/scala/bio/ferlab/ferload/endpoints/SecuredEndpoint.scala @@ -0,0 +1,27 @@ +package bio.ferlab.ferload.endpoints + +import bio.ferlab.ferload.model.{ErrorResponse, Permissions, User} +import bio.ferlab.ferload.services.AuthorizationService +import cats.effect.IO +import io.circe.Json +import io.circe.generic.auto.* +import org.http4s.circe.* +import org.http4s.client.Client +import org.http4s.ember.client.EmberClientBuilder +import org.http4s.implicits.uri +import org.http4s.{Method, Request} +import sttp.client3.HttpError +import sttp.model.StatusCode +import sttp.tapir.* +import sttp.tapir.generic.auto.* +import sttp.tapir.json.circe.jsonBody +import sttp.tapir.server.* + +import scala.util.Right + +object 
SecuredEndpoint: + val baseEndpoint: Endpoint[String, Unit, (StatusCode, ErrorResponse), Unit, Any] = endpoint + .securityIn(auth.bearer[String]()) + .errorOut(statusCode.and(jsonBody[ErrorResponse])) + + diff --git a/src/main/scala/bio/ferlab/ferload/model/ErrorResponse.scala b/src/main/scala/bio/ferlab/ferload/model/ErrorResponse.scala new file mode 100644 index 0000000..05227de --- /dev/null +++ b/src/main/scala/bio/ferlab/ferload/model/ErrorResponse.scala @@ -0,0 +1,3 @@ +package bio.ferlab.ferload.model + +case class ErrorResponse(msg: String, statusCode: Int) \ No newline at end of file diff --git a/src/main/scala/bio/ferlab/ferload/model/FerloadConfig.scala b/src/main/scala/bio/ferlab/ferload/model/FerloadConfig.scala new file mode 100644 index 0000000..6185c0d --- /dev/null +++ b/src/main/scala/bio/ferlab/ferload/model/FerloadConfig.scala @@ -0,0 +1,11 @@ +package bio.ferlab.ferload.model + +import sttp.tapir.Schema.annotations.encodedName + +import scala.annotation.targetName + +case class FerloadConfig(method: String, keycloak: Option[KeycloakConfig], tokenConfig: Option[TokenConfig]) + +case class KeycloakConfig(url: String, realm: String, `client-id`: String, audience: String) + +case class TokenConfig(link: String, helper: Option[String]) \ No newline at end of file diff --git a/src/main/scala/bio/ferlab/ferload/model/IntrospectResponse.scala b/src/main/scala/bio/ferlab/ferload/model/IntrospectResponse.scala new file mode 100644 index 0000000..1dfd6f7 --- /dev/null +++ b/src/main/scala/bio/ferlab/ferload/model/IntrospectResponse.scala @@ -0,0 +1,3 @@ +package bio.ferlab.ferload.model + +case class IntrospectResponse(active: Boolean, exp: Option[Int], iat: Option[Int], aud: Option[String], nbf: Option[Int], permissions: Option[Seq[Permissions]]) \ No newline at end of file diff --git a/src/main/scala/bio/ferlab/ferload/model/ObjectUrl.scala b/src/main/scala/bio/ferlab/ferload/model/ObjectUrl.scala new file mode 100644 index 0000000..38a3868 --- /dev/null +++ b/src/main/scala/bio/ferlab/ferload/model/ObjectUrl.scala @@ -0,0 +1,3 @@ +package bio.ferlab.ferload.model + +case class ObjectUrl(url: String) \ No newline at end of file diff --git a/src/main/scala/bio/ferlab/ferload/model/PartyToken.scala b/src/main/scala/bio/ferlab/ferload/model/PartyToken.scala new file mode 100644 index 0000000..8766b52 --- /dev/null +++ b/src/main/scala/bio/ferlab/ferload/model/PartyToken.scala @@ -0,0 +1,3 @@ +package bio.ferlab.ferload.model + +case class PartyToken(access_token: String, expires_in: Int, refresh_expires_in: Int, refresh_token: Option[String], token_type: String) \ No newline at end of file diff --git a/src/main/scala/bio/ferlab/ferload/model/Permissions.scala b/src/main/scala/bio/ferlab/ferload/model/Permissions.scala new file mode 100644 index 0000000..ee4281a --- /dev/null +++ b/src/main/scala/bio/ferlab/ferload/model/Permissions.scala @@ -0,0 +1,3 @@ +package bio.ferlab.ferload.model + +case class Permissions(resource_id: String, rsname: Option[String], resource_scopes: Seq[String]) \ No newline at end of file diff --git a/src/main/scala/bio/ferlab/ferload/model/ReadResource.scala b/src/main/scala/bio/ferlab/ferload/model/ReadResource.scala new file mode 100644 index 0000000..42c1d67 --- /dev/null +++ b/src/main/scala/bio/ferlab/ferload/model/ReadResource.scala @@ -0,0 +1,51 @@ +package bio.ferlab.ferload.model + +import io.circe.generic.semiauto.deriveDecoder +import io.circe.generic.semiauto.deriveEncoder +import sttp.tapir.Schema.annotations.encodedName +import 
io.circe.{Decoder, Encoder} + +case class ReadResource(id: String, + name: String, + displayName: Option[String], + resourceType: Option[String], + attributes: Map[String, List[String]], + uris: Seq[String], + resourceScopes: Option[Seq[ResourceScope]], + scopes: Option[Seq[ResourceScope]] + ) + +object ReadResource { + implicit val decoder: Decoder[ReadResource] = + Decoder.forProduct8("_id", "name", "displayName", "type", "attributes", "uris", "resource_scopes", "scopes")(ReadResource.apply) + + implicit val encoder: Encoder[ReadResource] = + Encoder.forProduct8("_id", "name", "displayName", "type", "attributes", "uris", "resource_scopes", "scopes")(r => + (r.id, r.name, r.displayName, r.resourceType, r.attributes, r.uris, r.resourceScopes, r.scopes) + ) +} + +case class ResourceScope(name: String) + +object ResourceScope { + implicit val decoder: Decoder[ResourceScope] = deriveDecoder[ResourceScope] + implicit val encoder: Encoder[ResourceScope] = deriveEncoder[ResourceScope] +} + +case class WriteResource(id: String, + name: String, + displayName: Option[String], + resourceType: Option[String], + attributes: Map[String, List[String]], + uris: Seq[String], + resourceScopes: Option[Seq[String]]) + +object WriteResource { + implicit val decoder: Decoder[WriteResource] = + Decoder.forProduct7("_id", "name", "displayName", "type", "attributes", "uris", "resource_scopes")(WriteResource.apply) + + implicit val encoder: Encoder[WriteResource] = + Encoder.forProduct7("_id", "name", "displayName", "type", "attributes", "uris", "resource_scopes")(r => + (r.id, r.name, r.displayName, r.resourceType, r.attributes, r.uris, r.resourceScopes) + ) +} \ No newline at end of file diff --git a/src/main/scala/bio/ferlab/ferload/model/User.scala b/src/main/scala/bio/ferlab/ferload/model/User.scala new file mode 100644 index 0000000..3a1f304 --- /dev/null +++ b/src/main/scala/bio/ferlab/ferload/model/User.scala @@ -0,0 +1,3 @@ +package bio.ferlab.ferload.model + +case class User(token: String, permissions: Set[Permissions]) \ No newline at end of file diff --git a/src/main/scala/bio/ferlab/ferload/model/drs/AccessMethod.scala b/src/main/scala/bio/ferlab/ferload/model/drs/AccessMethod.scala new file mode 100644 index 0000000..c8e0909 --- /dev/null +++ b/src/main/scala/bio/ferlab/ferload/model/drs/AccessMethod.scala @@ -0,0 +1,17 @@ +package bio.ferlab.ferload.model.drs + +/** + * @param `type` Type of the access method. + * @param access_url + * @param access_id An arbitrary string to be passed to the `/access` method to get an `AccessURL`. This string must be unique within the scope of a single object. Note that at least one of `access_url` and `access_id` must be provided. + * @param region Name of the region in the cloud service provider that the object belongs to. for example: ''us-east-1'' + * @param authorizations + */ +case class AccessMethod ( + `type`: String, + access_url: Option[AccessURL], + access_id: Option[String], + region: Option[String], + authorizations: Option[AllOfAccessMethodAuthorizations] +) + diff --git a/src/main/scala/bio/ferlab/ferload/model/drs/AccessURL.scala b/src/main/scala/bio/ferlab/ferload/model/drs/AccessURL.scala new file mode 100644 index 0000000..8256b9b --- /dev/null +++ b/src/main/scala/bio/ferlab/ferload/model/drs/AccessURL.scala @@ -0,0 +1,11 @@ +package bio.ferlab.ferload.model.drs + +/** + * @param url A fully resolvable URL that can be used to fetch the actual object bytes. + * @param headers An optional list of headers to include in the HTTP request to `url`. 
These headers can be used to provide auth tokens required to fetch the object bytes. for example: ''Authorization: Basic Z2E0Z2g6ZHJz'' + */ +case class AccessURL ( + url: String, + headers: Option[List[String]] +) + diff --git a/src/main/scala/bio/ferlab/ferload/model/drs/AllOfAccessMethodAuthorizations.scala b/src/main/scala/bio/ferlab/ferload/model/drs/AllOfAccessMethodAuthorizations.scala new file mode 100644 index 0000000..7c7a8e9 --- /dev/null +++ b/src/main/scala/bio/ferlab/ferload/model/drs/AllOfAccessMethodAuthorizations.scala @@ -0,0 +1,13 @@ +package bio.ferlab.ferload.model.drs + +/** + * @param supported_types An Optional list of support authorization types. More than one can be supported and tried in sequence. Defaults to `None` if empty or missing. + * @param passport_auth_issuers If authorizations contain `PassportAuth` this is a required list of visa issuers (as found in a visa's `iss` claim) that may authorize access to this object. The caller must only provide passports that contain visas from this list. It is strongly recommended that the caller validate that it is appropriate to send the requested passport/visa to the DRS server to mitigate attacks by malicious DRS servers requesting credentials they should not have. + * @param bearer_auth_issuers If authorizations contain `BearerAuth` this is an optional list of issuers that may authorize access to this object. The caller must provide a token from one of these issuers. If this is empty or missing it assumed the caller knows which token to send via other means. It is strongly recommended that the caller validate that it is appropriate to send the requested token to the DRS server to mitigate attacks by malicious DRS servers requesting credentials they should not have. + */ +case class AllOfAccessMethodAuthorizations ( + supported_types: Option[List[String]], + passport_auth_issuers: Option[List[String]], + bearer_auth_issuers: Option[List[String]] +) + diff --git a/src/main/scala/bio/ferlab/ferload/model/drs/Authorizations.scala b/src/main/scala/bio/ferlab/ferload/model/drs/Authorizations.scala new file mode 100644 index 0000000..0e43374 --- /dev/null +++ b/src/main/scala/bio/ferlab/ferload/model/drs/Authorizations.scala @@ -0,0 +1,13 @@ +package bio.ferlab.ferload.model.drs + +/** + * @param supported_types An Optional list of support authorization types. More than one can be supported and tried in sequence. Defaults to `None` if empty or missing. + * @param passport_auth_issuers If authorizations contain `PassportAuth` this is a required list of visa issuers (as found in a visa's `iss` claim) that may authorize access to this object. The caller must only provide passports that contain visas from this list. It is strongly recommended that the caller validate that it is appropriate to send the requested passport/visa to the DRS server to mitigate attacks by malicious DRS servers requesting credentials they should not have. + * @param bearer_auth_issuers If authorizations contain `BearerAuth` this is an optional list of issuers that may authorize access to this object. The caller must provide a token from one of these issuers. If this is empty or missing it assumed the caller knows which token to send via other means. It is strongly recommended that the caller validate that it is appropriate to send the requested token to the DRS server to mitigate attacks by malicious DRS servers requesting credentials they should not have. 
+ */ +case class Authorizations ( + supported_types: Option[List[String]], + passport_auth_issuers: Option[List[String]], + bearer_auth_issuers: Option[List[String]] +) + diff --git a/src/main/scala/bio/ferlab/ferload/model/drs/Checksum.scala b/src/main/scala/bio/ferlab/ferload/model/drs/Checksum.scala new file mode 100644 index 0000000..a279e56 --- /dev/null +++ b/src/main/scala/bio/ferlab/ferload/model/drs/Checksum.scala @@ -0,0 +1,11 @@ +package bio.ferlab.ferload.model.drs + +/** + * @param checksum The hex-string encoded checksum for the data + * @param `type` The digest method used to create the checksum. The value (e.g. `sha-256`) SHOULD be listed as `Hash Name String` in the https://www.iana.org/assignments/named-information/named-information.xhtml#hash-alg[IANA Named Information Hash Algorithm Registry]. Other values MAY be used, as long as implementors are aware of the issues discussed in https://tools.ietf.org/html/rfc6920#section-9.4[RFC6920]. GA4GH may provide more explicit guidance for use of non-IANA-registered algorithms in the future. Until then, if implementers do choose such an algorithm (e.g. because it's implemented by their storage provider), they SHOULD use an existing standard `type` value such as `md5`, `etag`, `crc32c`, `trunc512`, or `sha1`. for example: ''sha-256'' + */ +case class Checksum ( + checksum: String, + `type`: String +) + diff --git a/src/main/scala/bio/ferlab/ferload/model/drs/ContentsObject.scala b/src/main/scala/bio/ferlab/ferload/model/drs/ContentsObject.scala new file mode 100644 index 0000000..08aaec8 --- /dev/null +++ b/src/main/scala/bio/ferlab/ferload/model/drs/ContentsObject.scala @@ -0,0 +1,13 @@ +package bio.ferlab.ferload.model.drs + +/** + * @param name A name declared by the bundle author that must be used when materialising this object, overriding any name directly associated with the object itself. The name must be unique within the containing bundle. This string is made up of uppercase and lowercase letters, decimal digits, hyphen, period, and underscore [A-Za-z0-9.-_]. See http://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap03.html#tag_03_282[portable filenames]. + * @param id A DRS identifier of a `DrsObject` (either a single blob or a nested bundle). If this ContentsObject is an object within a nested bundle, then the id is optional. Otherwise, the id is required. + * @param drs_uri A list of full DRS identifier URI paths that may be used to obtain the object. These URIs may be external to this DRS instance. 
for example: ''drs://drs.example.org/314159'' + */ +case class ContentsObject ( + name: String, + id: Option[String], + drs_uri: Option[List[String]] +) + diff --git a/src/main/scala/bio/ferlab/ferload/model/drs/CreateDrsObject.scala b/src/main/scala/bio/ferlab/ferload/model/drs/CreateDrsObject.scala new file mode 100644 index 0000000..c47cf3e --- /dev/null +++ b/src/main/scala/bio/ferlab/ferload/model/drs/CreateDrsObject.scala @@ -0,0 +1,45 @@ +package bio.ferlab.ferload.model.drs + +import bio.ferlab.ferload.model.{ReadResource, ResourceScope, WriteResource} + +import java.time.LocalDateTime + +case class CreateDrsObject( + id: String, + name: Option[String], + size: Option[Long], + created_time: Option[LocalDateTime], + updated_time: Option[LocalDateTime], + version: Option[String], + mime_type: Option[String], + checksums: Option[List[Checksum]], + description: Option[String], + aliases: Option[List[String]], + uris: List[String], + scopes: Option[List[String]] + ) + +object CreateDrsObject { + def toResource(obj: CreateDrsObject): WriteResource = { + val attributes: Map[String, List[String]] = Seq( + obj.size.map(s => "size" -> List(s.toString)), + obj.created_time.map(ct => "created_time" -> List(ct.toString)), + obj.updated_time.map(ut => "updated_time" -> List(ut.toString)), + obj.version.map(v => "version" -> List(v)), + obj.mime_type.map(mt => "mime_type" -> List(mt)), + obj.checksums.map(cs => "checksum" -> cs.map(c => s"${c.`type`}:${c.checksum}")), + obj.description.map(d => "description" -> List(d)), + obj.aliases.map(a => "aliases" -> a) + ).flatten.toMap + WriteResource( + id = obj.id, + name = obj.id, + displayName = obj.name, + resourceType = Some("FILE"), + attributes = attributes, + uris = obj.uris, + resourceScopes = obj.scopes + ) + } +} + diff --git a/src/main/scala/bio/ferlab/ferload/model/drs/DrsObject.scala b/src/main/scala/bio/ferlab/ferload/model/drs/DrsObject.scala new file mode 100644 index 0000000..6f3f1f8 --- /dev/null +++ b/src/main/scala/bio/ferlab/ferload/model/drs/DrsObject.scala @@ -0,0 +1,84 @@ +package bio.ferlab.ferload.model.drs + +import bio.ferlab.ferload.model.ReadResource + +import java.time.LocalDateTime + +/** + * @param id An identifier unique to this `DrsObject` + * @param name A string that can be used to name a `DrsObject`. This string is made up of uppercase and lowercase letters, decimal digits, hyphen, period, and underscore [A-Za-z0-9.-_]. See http://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap03.html#tag_03_282[portable filenames]. + * @param self_uri A drs:// hostname-based URI, as defined in the DRS documentation, that tells clients how to access this object. The intent of this field is to make DRS objects self-contained, and therefore easier for clients to store and pass around. For example, if you arrive at this DRS JSON by resolving a compact identifier-based DRS URI, the `self_uri` presents you with a hostname and properly encoded DRS ID for use in subsequent `access` endpoint calls. for example: ''drs://drs.example.org/314159'' + * @param size For blobs, the blob size in bytes. For bundles, the cumulative size, in bytes, of items in the `contents` field. + * @param created_time Timestamp of content creation in RFC3339. (This is the creation time of the underlying content, not of the JSON object.) + * @param updated_time Timestamp of content update in RFC3339, identical to `created_time` in systems that do not support updates. (This is the update time of the underlying content, not of the JSON object.) 
+ * @param version A string representing a version. (Some systems may use checksum, a RFC3339 timestamp, or an incrementing version number.) + * @param mime_type A string providing the mime-type of the `DrsObject`. for example: ''application/json'' + * @param checksums The checksum of the `DrsObject`. At least one checksum must be provided. For blobs, the checksum is computed over the bytes in the blob. For bundles, the checksum is computed over a sorted concatenation of the checksums of its top-level contained objects (not recursive, names not included). The list of checksums is sorted alphabetically (hex-code) before concatenation and a further checksum is performed on the concatenated checksum value. For example, if a bundle contains blobs with the following checksums: md5(blob1) = 72794b6d md5(blob2) = 5e089d29 Then the checksum of the bundle is: md5( concat( sort( md5(blob1), md5(blob2) ) ) ) = md5( concat( sort( 72794b6d, 5e089d29 ) ) ) = md5( concat( 5e089d29, 72794b6d ) ) = md5( 5e089d2972794b6d ) = f7a29a04 + * @param access_methods The list of access methods that can be used to fetch the `DrsObject`. Required for single blobs; optional for bundles. + * @param contents If not set, this `DrsObject` is a single blob. If set, this `DrsObject` is a bundle containing the listed `ContentsObject` s (some of which may be further nested). + * @param description A human readable description of the `DrsObject`. + * @param aliases A list of strings that can be used to find other metadata about this `DrsObject` from external metadata sources. These aliases can be used to represent secondary accession numbers or external GUIDs. + */ +case class DrsObject( + id: String, + name: Option[String], + self_uri: String, + size: Long, + created_time: LocalDateTime, + updated_time: Option[LocalDateTime], + version: Option[String], + mime_type: Option[String], + checksums: List[Checksum], + access_methods: Option[List[AccessMethod]], + contents: Option[List[ContentsObject]], + description: Option[String], + aliases: Option[List[String]] + ) + +object DrsObject { + def build(resource: ReadResource, presignedUrl: String, host: String): DrsObject = { + + val accessMethods = AccessMethod( + `type` = "https", + access_url = Some(AccessURL( + url = presignedUrl, + headers = None + )), + access_id = None, + region = None, + authorizations = None + + ) + build(resource, host).copy(access_methods = Some(List(accessMethods))) + + } + + def build(resource: ReadResource, host: String): DrsObject = { + + val checksums = resource.attributes.getOrElse("checksum", Nil).map { checksum => + val parts = checksum.split(":") + Checksum(checksum = parts.last, `type` = parts.head) + } + + DrsObject( + id = resource.name, + name = resource.displayName, + self_uri = s"drs://$host/${resource.name}", + size = firstAttribute(resource, "size").map(_.toLong).getOrElse(0L), + created_time = firstAttribute(resource, "created_time").map(LocalDateTime.parse).getOrElse(LocalDateTime.now()), + updated_time = firstAttribute(resource, "updated_time").map(LocalDateTime.parse), + version = firstAttribute(resource, "version"), + mime_type = firstAttribute(resource, "mime_type"), + checksums = checksums, + access_methods = None, + contents = None, + description = firstAttribute(resource, "description"), + aliases = resource.attributes.get("aliases") + ) + } + + private def firstAttribute(resource: ReadResource, key: String): Option[String] = { + resource.attributes.get(key).flatMap(_.headOption) + } +} + diff --git 
a/src/main/scala/bio/ferlab/ferload/model/drs/DrsService.scala b/src/main/scala/bio/ferlab/ferload/model/drs/DrsService.scala new file mode 100644 index 0000000..d2782f9 --- /dev/null +++ b/src/main/scala/bio/ferlab/ferload/model/drs/DrsService.scala @@ -0,0 +1,9 @@ +package bio.ferlab.ferload.model.drs + +/** + * @param `type` + */ +case class DrsService ( + `type`: DrsServiceType +) + diff --git a/src/main/scala/bio/ferlab/ferload/model/drs/DrsServiceType.scala b/src/main/scala/bio/ferlab/ferload/model/drs/DrsServiceType.scala new file mode 100644 index 0000000..d8db6c0 --- /dev/null +++ b/src/main/scala/bio/ferlab/ferload/model/drs/DrsServiceType.scala @@ -0,0 +1,9 @@ +package bio.ferlab.ferload.model.drs + +/** + * @param artifact for example: ''drs'' + */ +case class DrsServiceType( + artifact: String +) + diff --git a/src/main/scala/bio/ferlab/ferload/model/drs/Error.scala b/src/main/scala/bio/ferlab/ferload/model/drs/Error.scala new file mode 100644 index 0000000..5de576d --- /dev/null +++ b/src/main/scala/bio/ferlab/ferload/model/drs/Error.scala @@ -0,0 +1,13 @@ +package bio.ferlab.ferload.model.drs + +/** + * An object that can optionally include information about the error. + * + * @param msg A detailed error message. + * @param status_code The integer representing the HTTP status code (e.g. 200, 404). + */ +case class Error ( + msg: Option[String], + status_code: Option[Int] +) + diff --git a/src/main/scala/bio/ferlab/ferload/model/drs/Service.scala b/src/main/scala/bio/ferlab/ferload/model/drs/Service.scala new file mode 100644 index 0000000..8dc0700 --- /dev/null +++ b/src/main/scala/bio/ferlab/ferload/model/drs/Service.scala @@ -0,0 +1,34 @@ +package bio.ferlab.ferload.model.drs + +import java.time.LocalDateTime +import java.util.Date + +/** + * GA4GH service + * + * @param id Unique ID of this service. Reverse domain name notation is recommended, though not required. The identifier should attempt to be globally unique so it can be used in downstream aggregator services e.g. Service Registry. for example: ''org.ga4gh.myservice'' + * @param name Name of this service. Should be human readable. for example: ''My project'' + * @param `type` + * @param description Description of the service. Should be human readable and provide information about the service. for example: ''This service provides...'' + * @param organization + * @param contactUrl URL of the contact for the provider of this service, e.g. a link to a contact form (RFC 3986 format), or an email (RFC 2368 format). for example: ''mailto:support@example.com'' + * @param documentationUrl URL of the documentation of this service (RFC 3986 format). This should help someone learn how to use your service, including any specifics required to access data, e.g. authentication. for example: ''https://docs.myservice.example.com'' + * @param createdAt Timestamp describing when the service was first deployed and available (RFC 3339 format) for example: ''2019-06-04T12:58:19Z'' + * @param updatedAt Timestamp describing when the service was last updated (RFC 3339 format) for example: ''2019-06-04T12:58:19Z'' + * @param environment Environment the service is running in. Use this to distinguish between production, development and testing/staging deployments. Suggested values are prod, test, dev, staging. However this is advised and not enforced. for example: ''test'' + * @param version Version of the service being described. Semantic versioning is recommended, but other identifiers, such as dates or commit hashes, are also allowed. 
The version should be changed whenever the service is updated. for example: ''1.0.0'' + */ +case class Service( + id: String, + name: String, + `type`: ServiceType, + description: Option[String], + organization: ServiceOrganization, + contactUrl: Option[String], + documentationUrl: Option[String], + createdAt: Option[LocalDateTime], + updatedAt: Option[LocalDateTime], + environment: Option[String], + version: String + ) + diff --git a/src/main/scala/bio/ferlab/ferload/model/drs/ServiceOrganization.scala b/src/main/scala/bio/ferlab/ferload/model/drs/ServiceOrganization.scala new file mode 100644 index 0000000..5d265c8 --- /dev/null +++ b/src/main/scala/bio/ferlab/ferload/model/drs/ServiceOrganization.scala @@ -0,0 +1,13 @@ +package bio.ferlab.ferload.model.drs + +/** + * Organization providing the service + * + * @param name Name of the organization responsible for the service for example: ''My organization'' + * @param url URL of the website of the organization (RFC 3986 format) for example: ''https://example.com'' + */ +case class ServiceOrganization( + name: String, + url: String +) + diff --git a/src/main/scala/bio/ferlab/ferload/model/drs/ServiceType.scala b/src/main/scala/bio/ferlab/ferload/model/drs/ServiceType.scala new file mode 100644 index 0000000..9cc74b5 --- /dev/null +++ b/src/main/scala/bio/ferlab/ferload/model/drs/ServiceType.scala @@ -0,0 +1,15 @@ +package bio.ferlab.ferload.model.drs + +/** + * Type of a GA4GH service + * + * @param group Namespace in reverse domain name format. Use `org.ga4gh` for implementations compliant with official GA4GH specifications. For services with custom APIs not standardized by GA4GH, or implementations diverging from official GA4GH specifications, use a different namespace (e.g. your organization's reverse domain name). for example: ''org.ga4gh'' + * @param artifact Name of the API or GA4GH specification implemented. Official GA4GH types should be assigned as part of standards approval process. Custom artifacts are supported. for example: ''beacon'' + * @param version Version of the API or specification. GA4GH specifications use semantic versioning. for example: ''1.0.0'' + */ +case class ServiceType ( + group: String, + artifact: String, + version: String +) + diff --git a/src/main/scala/bio/ferlab/ferload/services/AuthorizationService.scala b/src/main/scala/bio/ferlab/ferload/services/AuthorizationService.scala new file mode 100644 index 0000000..b728fe1 --- /dev/null +++ b/src/main/scala/bio/ferlab/ferload/services/AuthorizationService.scala @@ -0,0 +1,115 @@ +package bio.ferlab.ferload.services + +import bio.ferlab.ferload.AuthConfig +import bio.ferlab.ferload.model.* +import cats.effect.IO +import com.github.benmanes.caffeine.cache.{AsyncCacheLoader, AsyncLoadingCache, Caffeine, Expiry} +import io.circe.Error +import io.circe.generic.auto.* +import sttp.capabilities.fs2.Fs2Streams +import sttp.client3.* +import sttp.client3.circe.* +import sttp.model +import sttp.model.MediaType.ApplicationXWwwFormUrlencoded +import sttp.model.StatusCode + +import java.util.concurrent.Executor +import scala.concurrent.ExecutionContext + +/** + * Service used to authorize a user to access resources + * + * @param authConfig configuration of open id server + * @param backend sttp backend used to query the open id server + */ +class AuthorizationService(authConfig: AuthConfig, backend: SttpBackend[IO, Fs2Streams[IO]]) { + + /** + * Exchange a token for a Request Party Token (RPT) that can be used to access resources passed in the resources parameter. 
+ * + * @param token the token to exchange + * @param resources the resources to access + * @return the RPT + */ + protected[services] def requestPartyToken(token: String, resources: Seq[String]): IO[String] = { + val body: Seq[(String, String)] = Seq( + "grant_type" -> "urn:ietf:params:oauth:grant-type:uma-ticket", + "audience" -> authConfig.clientId, + ) ++ resources.map(r => "permission" -> r) + + val auth: IO[Response[Either[ResponseException[String, Error], PartyToken]]] = basicRequest.post(uri"${authConfig.baseUri}/protocol/openid-connect/token") + .auth.bearer(token) + .contentType(ApplicationXWwwFormUrlencoded) + .body(body, "utf-8") + .response(asJson[PartyToken]) + .send(backend) + auth.flatMap(r => IO.fromEither(r.body).map(_.access_token)) + } + + /** + * Introspect a party token to get the permissions associated with it. + * + * @param partyToken the party token to introspect + * @return the response from the introspection endpoint + */ + protected[services] def introspectPartyToken(partyToken: String): IO[IntrospectResponse] = { + + val introspect = basicRequest.post(uri"${authConfig.baseUri}/protocol/openid-connect/token/introspect") + .contentType(ApplicationXWwwFormUrlencoded) + .body("token_type_hint" -> "requesting_party_token", + "token" -> partyToken, + "client_id" -> authConfig.clientId, + "client_secret" -> authConfig.clientSecret) + .response(asJson[IntrospectResponse]) + .send(backend) + introspect.flatMap(r => IO.fromEither(r.body)) + + } + + /** + * Validate a token and return the user with permissions if the token is valid and if user have access to the resources. Otherwise, return an error. + * + * @param token the token to validate + * @param resources the resources to access + * @return the user with permissions if the token is valid and if user have access to the resources. Otherwise, return errors (Unauthorized, Forbidden, NotFound). + */ + def authLogic(token: String, resources: Seq[String]): IO[Either[(StatusCode, ErrorResponse), User]] = { + val r: IO[User] = for { + partyToken <- requestPartyToken(token, resources) + permissionToken <- introspectPartyToken(partyToken) + } yield { + val value: Set[Permissions] = permissionToken.permissions.map(_.toSet).getOrElse(Set.empty) + User(partyToken, value) + } + + r.map { + case User(_, permissions) if containAllPermissions(resources, permissions) => Right(User(token, permissions)) + case _ => Left((StatusCode.Forbidden, ErrorResponse("Forbidden", 403))) + } + .recover { + case HttpError(_, statusCode) if Seq(StatusCode.Unauthorized, StatusCode.Forbidden).contains(statusCode) => Left((statusCode, ErrorResponse("Unauthorized", statusCode.code))).withRight[User] + case e: HttpError[String] if e.statusCode == StatusCode.BadRequest && e.body.contains("invalid_resource") => Left((StatusCode.NotFound, ErrorResponse("Not Found", 404))).withRight[User] + } + + } + + /** + * Verifies if the permissions contains all the resources. 
+ * + * @param resources the resources to access + * @param permissions the permissions to check + * @return true if the permissions contains all the resources, false otherwise + */ + private def containAllPermissions(resources: Seq[String], permissions: Set[Permissions]): Boolean = { + resources.forall(r => { + val resourceInPermissions = permissions.flatMap(_.rsname) + resourceInPermissions.contains(r) + }) + } + +} + + + + + diff --git a/src/main/scala/bio/ferlab/ferload/services/ResourceService.scala b/src/main/scala/bio/ferlab/ferload/services/ResourceService.scala new file mode 100644 index 0000000..0319584 --- /dev/null +++ b/src/main/scala/bio/ferlab/ferload/services/ResourceService.scala @@ -0,0 +1,144 @@ +package bio.ferlab.ferload.services + +import bio.ferlab.ferload.AuthConfig +import bio.ferlab.ferload.model.{PartyToken, Permissions, ReadResource, WriteResource} +import cats.effect.IO +import com.github.benmanes.caffeine.cache.{AsyncCacheLoader, AsyncLoadingCache, Caffeine, Expiry} +import io.circe.Error +import io.circe.generic.auto.* +import sttp.client3.circe.* +import sttp.capabilities.fs2.Fs2Streams +import sttp.client3.circe.asJson +import sttp.client3.{Response, ResponseException, SttpBackend, UriContext, basicRequest} +import sttp.model.MediaType.ApplicationXWwwFormUrlencoded +import sttp.model.StatusCode +import sttp.client3._ +import java.util.concurrent.Executor +import scala.concurrent.ExecutionContext + +class ResourceService(authConfig: AuthConfig, backend: SttpBackend[IO, Fs2Streams[IO]]) { + + /** + * Get a resource by id + * + * @param id id of the resource to get + * @return the resource if exist or an error + */ + def getResourceById(id: String): IO[ReadResource] = { + for { + token <- clientToken() + resource <- fetchResourceById(id, token.access_token) + } yield resource + + } + + private def resourceRequest(token: String) = { + basicRequest + .auth.bearer(token) + } + + /** + * Fetch a resource by id + * + * @param id id of the resource to get + * @param token token to use to fetch the resource + * @return the resource if exist or an error + */ + private def fetchResourceById(id: String, token: String): IO[ReadResource] = { + val auth: IO[Response[Either[ResponseException[String, Error], ReadResource]]] = resourceRequest(token) + .get(uri"${authConfig.baseUri}/authz/protection/resource_set/$id") + .response(asJson[ReadResource]) + .send(backend) + auth.flatMap(r => IO.fromEither(r.body)) + } + + def createResource(token: String, resource: WriteResource): IO[StatusCode] = { + val resp = resourceRequest(token) + .body(resource) + .post(uri"${authConfig.baseUri}/authz/protection/resource_set") + .response(asStringAlways) + .send(backend) + .flatMap { + case r if Seq(StatusCode.Unauthorized, StatusCode.Forbidden).contains(r.code) || r.isSuccess => IO.pure(r.code) + case r => IO.raiseError(HttpError(r.body, r.code)) + } + resp + } + + def updateResource(token: String, resource: WriteResource): IO[StatusCode] = { + val resp = resourceRequest(token) + .body(resource) + .put(uri"${authConfig.baseUri}/authz/protection/resource_set/${resource.id}") + .response(asStringAlways) + .send(backend) + .flatMap { + case r if Seq(StatusCode.Unauthorized, StatusCode.Forbidden).contains(r.code) || r.isSuccess => IO.pure(r.code) + case r => IO.raiseError(HttpError(r.body, r.code)) + } + resp + + } + + /** + * Check if a resource exist + * + * @param id id of the resource to check + * @return 200 if the resource exist, 404 if not + */ + def existResource(id: String): 
IO[StatusCode] = { + for { + token <- clientToken() + resp <- resourceRequest(token.access_token) + .get(uri"${authConfig.baseUri}/authz/protection/resource_set/$id") + .send(backend) + } yield resp.code + } + + /** + * Fetch a client token based on authConfig + * + * @return the token + */ + private def requestClientToken(): IO[PartyToken] = { + val body: Seq[(String, String)] = Seq( + "client_id" -> authConfig.clientId, + "client_secret" -> authConfig.clientSecret, + "grant_type" -> "client_credentials" + ) + val auth: IO[Response[Either[ResponseException[String, Error], PartyToken]]] = + basicRequest.post(uri"${authConfig.baseUri}/protocol/openid-connect/token") + .contentType(ApplicationXWwwFormUrlencoded) + .body(body, "utf-8") + .response(asJson[PartyToken]) + .send(backend) + auth.flatMap(r => IO.fromEither(r.body)) + } + + private val cacheLoader: AsyncCacheLoader[String, PartyToken] = (key: String, executor: Executor) => { + import cats.effect.unsafe.implicits.global + requestClientToken().evalOn(ExecutionContext.fromExecutor(executor)).unsafeToCompletableFuture() + } + + private val expiry = new Expiry[String, PartyToken]() { + override def expireAfterCreate(key: String, value: PartyToken, currentTime: Long): Long = { + val d = value.expires_in * 1E9 - 5 * 1E9 + d.toLong + } + + override def expireAfterUpdate(key: String, value: PartyToken, currentTime: Long, currentDuration: Long): Long = { + currentDuration + } + + override def expireAfterRead(key: String, value: PartyToken, currentTime: Long, currentDuration: Long): Long = { + currentDuration + } + + } + val cache: AsyncLoadingCache[String, PartyToken] = Caffeine.newBuilder() + .expireAfter(expiry) + .buildAsync(cacheLoader) + + protected[services] def clientToken(): IO[PartyToken] = IO.fromCompletableFuture(IO(cache.get("client_token"))) + + +} diff --git a/src/main/scala/bio/ferlab/ferload/services/S3Service.scala b/src/main/scala/bio/ferlab/ferload/services/S3Service.scala new file mode 100644 index 0000000..9ea5fa8 --- /dev/null +++ b/src/main/scala/bio/ferlab/ferload/services/S3Service.scala @@ -0,0 +1,74 @@ +package bio.ferlab.ferload.services + +import bio.ferlab.ferload.S3Config +import software.amazon.awssdk.auth.credentials.{AwsBasicCredentials, AwsCredentialsProvider, InstanceProfileCredentialsProvider, StaticCredentialsProvider} +import software.amazon.awssdk.regions.Region +import software.amazon.awssdk.services.s3.S3Configuration +import software.amazon.awssdk.services.s3.model.GetObjectRequest +import software.amazon.awssdk.services.s3.presigner.S3Presigner +import software.amazon.awssdk.services.s3.presigner.model.GetObjectPresignRequest + +import java.net.URI +import java.time.Duration +import scala.util.{Failure, Success, Try} + +class S3Service(s3Config: S3Config) { + val confBuilder: S3Configuration = software.amazon.awssdk.services.s3.S3Configuration.builder() + .pathStyleAccessEnabled(s3Config.pathAccessStyle) + .build() + val credentialsProvider: AwsCredentialsProvider = s3Config.accessKey.map { accessKey => + StaticCredentialsProvider.create( + AwsBasicCredentials.create(accessKey, s3Config.secretKey.get) + ) + }.getOrElse(InstanceProfileCredentialsProvider.create()) + + private val endpoint: Option[URI] = s3Config.endpoint.map(URI.create) + + private val presignerBuilder = S3Presigner + .builder() + + private val regionPresignerBuilder = s3Config.region.map(r => presignerBuilder.region(Region.of(r))).getOrElse(presignerBuilder) + + private val endpointPresignerBuilder = 
endpoint.map(regionPresignerBuilder.endpointOverride).getOrElse(regionPresignerBuilder) + + val presigner: S3Presigner = endpointPresignerBuilder + .serviceConfiguration(confBuilder) + .credentialsProvider(credentialsProvider) + .build() + + private val presignedUrlDuration: Duration = Duration.ofSeconds(s3Config.expirationPresignedUrlInSeconds) + + + def presignedUrl(bucket: String, file: String): String = { + + val getObjectRequest = + GetObjectRequest.builder() + .bucket(bucket) + .key(file) + .build() + + val getObjectPresignRequest = GetObjectPresignRequest.builder + .signatureDuration(presignedUrlDuration) + .getObjectRequest(getObjectRequest).build + + val presignedGetObjectRequest = presigner.presignGetObject(getObjectPresignRequest) + + val url = presignedGetObjectRequest.url() + url.toString + } + + +} + +object S3Service: + def parseS3Url(s3Url: String): Try[(String, String)] = { + val s3Pattern = "s3://([^/]+)/(.+)".r + s3Url match { + case s3Pattern(bucket, path) => Success((bucket, path)) + case _ => Failure(new IllegalArgumentException("Invalid S3 URL format")) + } + } + + def parseS3Urls(urls: Seq[String]): Try[(String, String)] = { + urls.to(LazyList).map(S3Service.parseS3Url).find(_.isSuccess).getOrElse(Failure(new IllegalStateException("No S3 URL found"))) + } \ No newline at end of file diff --git a/src/test/scala/bio/ferlab/ferload/endpoints/ConfigEndpointsSpec.scala b/src/test/scala/bio/ferlab/ferload/endpoints/ConfigEndpointsSpec.scala new file mode 100644 index 0000000..c27798e --- /dev/null +++ b/src/test/scala/bio/ferlab/ferload/endpoints/ConfigEndpointsSpec.scala @@ -0,0 +1,61 @@ +package bio.ferlab.ferload.endpoints + +import bio.ferlab.ferload.endpoints.ConfigEndpoint.configServerEndpoint +import bio.ferlab.ferload.model.{FerloadConfig, KeycloakConfig, TokenConfig} +import bio.ferlab.ferload.{AuthConfig, Config, DrsConfig, FerloadClientConfig, HttpConfig, S3Config, unwrap} +import cats.effect.IO +import io.circe.generic.auto.* +import org.scalatest.EitherValues +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers +import sttp.client3.circe.* +import sttp.client3.testing.SttpBackendStub +import sttp.client3.{UriContext, basicRequest} +import sttp.tapir.integ.cats.effect.CatsMonadError +import sttp.tapir.server.stub.TapirStubInterpreter + +class ConfigEndpointsSpec extends AnyFlatSpec with Matchers with EitherValues: + + "config" should "return expected config for password method" in { + //given + val config = Config( + AuthConfig("http://localhost:8080", "realm", "clientId", "clientSecret", None), + HttpConfig("localhost", 9090), + S3Config(Some("accessKey"), Some("secretKey"), Some("endpoint"), Some("bucket"), false, Some("region"), 3600), + DrsConfig("ferlaod", "Ferload", "ferload.ferlab.bio", "1.3.0", "Ferlab", "https://ferlab.bio"), + FerloadClientConfig(FerloadClientConfig.PASSWORD, Some("ferloadClientId"), None, None) + ) + val backendStub = TapirStubInterpreter(SttpBackendStub(new CatsMonadError[IO]())) + .whenServerEndpointRunLogic(configServerEndpoint(config)) + .backend() + // when + val response = basicRequest + .get(uri"http://test.com/config") + .response(asJson[FerloadConfig]) + .send(backendStub) + + val expected = FerloadConfig(FerloadClientConfig.PASSWORD, Some(KeycloakConfig("http://localhost:8080", "realm", "ferloadClientId", "clientId")), None) + response.map(_.body.value shouldBe expected).unwrap + } + + it should "return expected config for token method" in { + //given + val config = Config( + 
AuthConfig("http://localhost:8080", "realm", "clientId", "clientSecret", None), + HttpConfig("localhost", 9090), + S3Config(Some("accessKey"), Some("secretKey"), Some("endpoint"), Some("bucket"), false, Some("region"), 3600), + DrsConfig("ferlaod", "Ferload", "ferload.ferlab.bio", "1.3.0", "Ferlab", "https://ferlab.bio"), + FerloadClientConfig(FerloadClientConfig.TOKEN, None, Some("https://ferload.ferlab.bio/token"), Some("Please copy / paste this url in your browser to get a new authentication token.")) + ) + val backendStub = TapirStubInterpreter(SttpBackendStub(new CatsMonadError[IO]())) + .whenServerEndpointRunLogic(configServerEndpoint(config)) + .backend() + // when + val response = basicRequest + .get(uri"http://test.com/config") + .response(asJson[FerloadConfig]) + .send(backendStub) + + val expected = FerloadConfig(FerloadClientConfig.TOKEN, None, Some(TokenConfig("https://ferload.ferlab.bio/token", Some("Please copy / paste this url in your browser to get a new authentication token.")))) + response.map(_.body.value shouldBe expected).unwrap + } \ No newline at end of file diff --git a/src/test/scala/bio/ferlab/ferload/endpoints/EndpointsSpec.scala b/src/test/scala/bio/ferlab/ferload/endpoints/EndpointsSpec.scala new file mode 100644 index 0000000..b09720e --- /dev/null +++ b/src/test/scala/bio/ferlab/ferload/endpoints/EndpointsSpec.scala @@ -0,0 +1,30 @@ +package bio.ferlab.ferload.endpoints + +import bio.ferlab.ferload.endpoints.Endpoints.statusServerEndpoint +import bio.ferlab.ferload.unwrap +import cats.effect.IO +import org.scalatest.EitherValues +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers +import sttp.client3.testing.SttpBackendStub +import sttp.client3.{UriContext, basicRequest} +import sttp.tapir.integ.cats.effect.CatsMonadError +import sttp.tapir.server.stub.TapirStubInterpreter + +class EndpointsSpec extends AnyFlatSpec with Matchers with EitherValues: + + + "status" should "return ok" in { + //given + val backendStub = TapirStubInterpreter(SttpBackendStub(new CatsMonadError[IO]())) + .whenServerEndpointRunLogic(statusServerEndpoint) + .backend() + // when + val response = basicRequest + .get(uri"http://test.com/status") + .send(backendStub) + + response.map(_.body.value shouldBe "OK!").unwrap + } + + diff --git a/src/test/scala/bio/ferlab/ferload/package.scala b/src/test/scala/bio/ferlab/ferload/package.scala new file mode 100644 index 0000000..b7907b4 --- /dev/null +++ b/src/test/scala/bio/ferlab/ferload/package.scala @@ -0,0 +1,8 @@ +package bio.ferlab + +import cats.effect.IO +import cats.effect.unsafe.implicits.global + +package object ferload { + extension[T] (t: IO[T]) def unwrap: T = t.unsafeRunSync() +} diff --git a/src/test/scala/bio/ferlab/ferload/services/AuthorizationServiceSpec.scala b/src/test/scala/bio/ferlab/ferload/services/AuthorizationServiceSpec.scala new file mode 100644 index 0000000..9bbc741 --- /dev/null +++ b/src/test/scala/bio/ferlab/ferload/services/AuthorizationServiceSpec.scala @@ -0,0 +1,240 @@ +package bio.ferlab.ferload.services + +import bio.ferlab.ferload.AuthConfig +import bio.ferlab.ferload.unwrap +import bio.ferlab.ferload.model.{ErrorResponse, IntrospectResponse, Permissions, User} +import cats.effect.IO +import org.scalatest.EitherValues +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers +import sttp.client3.http4s.Http4sBackend +import sttp.client3.testing.RecordingSttpBackend +import sttp.client3.{HttpError, StringBody, UriContext} +import 
sttp.model.{MediaType, StatusCode} + +class AuthorizationServiceSpec extends AnyFlatSpec with Matchers with EitherValues { + + "requestPartyToken" should "return a token" in { + val testingBackend = new RecordingSttpBackend(Http4sBackend.stub[IO] + .whenRequestMatches(_ => true) + .thenRespond(""" {"access_token": "E123456", "expires_in": 65, "refresh_expires_in": 0, "token_type" : "bearer"} """) + ) + val authConfig = AuthConfig("http://stub.local", "realm", "clientId", "clientSecret", None) + val authorizationService = new AuthorizationService(authConfig, testingBackend) + authorizationService.requestPartyToken("https://ferlab.bio", Seq("FI1")).unwrap shouldBe "E123456" + testingBackend.allInteractions.size shouldBe 1 + val (request, _) = testingBackend.allInteractions.head + request.uri shouldBe uri"http://stub.local/realms/realm/protocol/openid-connect/token" + request.body.asInstanceOf[StringBody] shouldBe StringBody("grant_type=urn%3Aietf%3Aparams%3Aoauth%3Agrant-type%3Auma-ticket&audience=clientId&permission=FI1", "utf-8", MediaType("text", "plain")) + + } + + it should "return an error" in { + val testingBackend = new RecordingSttpBackend(Http4sBackend.stub[IO] + .whenRequestMatches(_ => true) + .thenRespond(""" {"error": "invalid_token"}""", statusCode = StatusCode.Forbidden) + ) + val authConfig = AuthConfig("http://stub.local", "realm", "clientId", "clientSecret", None) + val authorizationService = new AuthorizationService(authConfig, testingBackend) + a[HttpError[_]] should be thrownBy { + authorizationService.requestPartyToken("https://ferlab.bio", Seq("FI1")).unwrap + } + + testingBackend.allInteractions.size shouldBe 1 + val (request, _) = testingBackend.allInteractions.head + request.uri shouldBe uri"http://stub.local/realms/realm/protocol/openid-connect/token" + request.body.asInstanceOf[StringBody] shouldBe StringBody("grant_type=urn%3Aietf%3Aparams%3Aoauth%3Agrant-type%3Auma-ticket&audience=clientId&permission=FI1", "utf-8", MediaType("text", "plain")) + } + + "introspectPartyToken" should "return a resonse token" in { + val testingBackend = new RecordingSttpBackend(Http4sBackend.stub[IO] + .whenRequestMatches(_ => true) + .thenRespond( + """ { + | "active": true, + | "exp": 65, + | "iat": 20, + | "aud" : "cqdg", + | "nbf": 4, + | "permissions" : [ + | { + | "resource_id": "F1", + | "rsname": "F1 Name", + | "resource_scopes": ["Scope1", "Scope2"] + | } + | ] + |} """.stripMargin) + ) + + val authConfig = AuthConfig("http://stub.local", "realm", "clientId", "clientSecret", None) + + val authorizationService = new AuthorizationService(authConfig, testingBackend) + val resp = authorizationService.introspectPartyToken("E123456").unwrap + resp shouldBe IntrospectResponse(active = true, exp = Some(65), iat = Some(20), aud = Some("cqdg"), nbf = Some(4), permissions = Some(Seq(Permissions("F1", Some("F1 Name"), Seq("Scope1", "Scope2"))))) + + + } + + "authLogic" should "return a User" in { + val testingBackend = new RecordingSttpBackend(Http4sBackend.stub[IO] + .whenRequestMatches(r => r.uri.path == Seq("realms", "realm", "protocol", "openid-connect", "token")) + .thenRespond(""" {"access_token": "E123456", "expires_in": 65, "refresh_expires_in": 0, "token_type" : "bearer"} """) + .whenRequestMatches(r => r.uri.path.contains("introspect")) + .thenRespond( + """ { + | "active": true, + | "exp": 65, + | "iat": 20, + | "aud" : "cqdg", + | "nbf": 4, + | "permissions" : [ + | { + | "resource_id": "F1", + | "rsname": "F1", + | "resource_scopes": ["Scope1", "Scope2"] + | } + | ] + |} 
""".stripMargin) + ) + + val authConfig = AuthConfig("http://stub.local", "realm", "clientId", "clientSecret", None) + + val authorizationService = new AuthorizationService(authConfig, testingBackend) + + authorizationService.authLogic("token", Seq("F1")).unwrap.value shouldBe User("token", Set(Permissions("F1", Some("F1"), Seq("Scope1", "Scope2")))) + } + + it should "return a forbidden if user dont have access to all resources" in { + val testingBackend = new RecordingSttpBackend(Http4sBackend.stub[IO] + .whenRequestMatches(r => { + r.uri.path == Seq("realms", "realm", "protocol", "openid-connect", "token") + }) + .thenRespond(""" {"access_token": "E123456", "expires_in": 65, "refresh_expires_in": 0, "token_type" : "bearer"} """) + .whenRequestMatches(r => r.uri.path.contains("introspect")) + .thenRespond( + """ { + | "active": true, + | "exp": 65, + | "iat": 20, + | "aud" : "cqdg", + | "nbf": 4, + | "permissions" : [ + | { + | "resource_id": "F1", + | "rsname": "F1", + | "resource_scopes": ["Scope1", "Scope2"] + | } + | ] + |} """.stripMargin) + ) + + val authConfig = AuthConfig("http://stub.local", "realm", "clientId", "clientSecret", None) + + val authorizationService = new AuthorizationService(authConfig, testingBackend) + + authorizationService.authLogic("token", Seq("F1", "F2")).unwrap.left.value shouldBe (StatusCode.Forbidden, ErrorResponse("Forbidden", 403)) + } + + it should "return a forbidden if user dont have access any resources" in { + val testingBackend = new RecordingSttpBackend(Http4sBackend.stub[IO] + .whenRequestMatches(r => { + r.uri.path == Seq("realms", "realm", "protocol", "openid-connect", "token") + }) + .thenRespond(""" { + | "error": "access_denied", + | "error_description": "not_authorized" + |} """.stripMargin, StatusCode.Forbidden) + .whenRequestMatches(r => r.uri.path.contains("introspect")) + .thenRespond( + """ { + | "active": true, + | "exp": 65, + | "iat": 20, + | "aud" : "cqdg", + | "nbf": 4, + | "permissions" : [ + | { + | "resource_id": "F1", + | "rsname": "F1", + | "resource_scopes": ["Scope1", "Scope2"] + | } + | ] + |} """.stripMargin) + ) + val authConfig = AuthConfig("http://stub.local", "realm", "clientId", "clientSecret", None) + + val authorizationService = new AuthorizationService(authConfig, testingBackend) + + authorizationService.authLogic("token", Seq("F1")).unwrap.left.value shouldBe(StatusCode.Forbidden, ErrorResponse("Unauthorized", 403)) + } + + it should "return a resource not found if resource does not exist" in { + val testingBackend = new RecordingSttpBackend(Http4sBackend.stub[IO] + .whenRequestMatches(r => { + r.uri.path == Seq("realms", "realm", "protocol", "openid-connect", "token") + }) + .thenRespond( + """ { + | "error": "invalid_resource", + | "error_description": "Resource with id [FIdssda] does not exist." 
+ |} """.stripMargin, StatusCode.BadRequest) + .whenRequestMatches(r => r.uri.path.contains("introspect")) + .thenRespond( + """ { + | "active": true, + | "exp": 65, + | "iat": 20, + | "aud" : "cqdg", + | "nbf": 4, + | "permissions" : [ + | { + | "resource_id": "F1", + | "rsname": "F1", + | "resource_scopes": ["Scope1", "Scope2"] + | } + | ] + |} """.stripMargin) + ) + val authConfig = AuthConfig("http://stub.local", "realm", "clientId", "clientSecret", None) + + val authorizationService = new AuthorizationService(authConfig, testingBackend) + + authorizationService.authLogic("token", Seq("F1")).unwrap.left.value shouldBe(StatusCode.NotFound, ErrorResponse("Not Found", 404)) + } + + it should "return unauthorized if bearer token is not valid" in { + val testingBackend = new RecordingSttpBackend(Http4sBackend.stub[IO] + .whenRequestMatches(r => { + r.uri.path == Seq("realms", "realm", "protocol", "openid-connect", "token") + }) + .thenRespond( + """ { + | "error": "invalid_grant", + | "error_description": "Invalid bearer token" + |} """.stripMargin, StatusCode.Unauthorized) + .whenRequestMatches(r => r.uri.path.contains("introspect")) + .thenRespond( + """ { + | "active": true, + | "exp": 65, + | "iat": 20, + | "aud" : "cqdg", + | "nbf": 4, + | "permissions" : [ + | { + | "resource_id": "F1", + | "rsname": "F1", + | "resource_scopes": ["Scope1", "Scope2"] + | } + | ] + |} """.stripMargin) + ) + val authConfig = AuthConfig("http://stub.local", "realm", "clientId", "clientSecret", None) + + val authorizationService = new AuthorizationService(authConfig, testingBackend) + + authorizationService.authLogic("token", Seq("F1")).unwrap.left.value shouldBe(StatusCode.Unauthorized, ErrorResponse("Unauthorized", 401)) + } + + +} diff --git a/src/test/scala/bio/ferlab/ferload/services/ResourceServiceSpec.scala b/src/test/scala/bio/ferlab/ferload/services/ResourceServiceSpec.scala new file mode 100644 index 0000000..126cd9f --- /dev/null +++ b/src/test/scala/bio/ferlab/ferload/services/ResourceServiceSpec.scala @@ -0,0 +1,138 @@ +package bio.ferlab.ferload.services + +import bio.ferlab.ferload.AuthConfig +import bio.ferlab.ferload.model.{PartyToken, ReadResource, ResourceScope} +import bio.ferlab.ferload.unwrap +import cats.effect.IO +import org.scalatest.EitherValues +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers +import sttp.client3.HttpError +import sttp.client3.http4s.Http4sBackend +import sttp.client3.testing.RecordingSttpBackend +import sttp.model.StatusCode + +class ResourceServiceSpec extends AnyFlatSpec with Matchers with EitherValues { + "clientToken" should "return a valid token" in { + val testingBackend = new RecordingSttpBackend(Http4sBackend.stub[IO] + .whenRequestMatches(_ => true) + .thenRespond(""" {"access_token": "E123456", "expires_in": 65, "refresh_expires_in": 0, "token_type" : "bearer"} """) + ) + val authConfig = AuthConfig("http://stub.local", "realm", "clientId", "clientSecret", None) + + val resourceService = new ResourceService(authConfig, testingBackend) + resourceService.clientToken().unwrap shouldBe PartyToken("E123456", 65, 0, None, "bearer") + } + + it should "return an error if creds are not valid" in { + val testingBackend = new RecordingSttpBackend(Http4sBackend.stub[IO] + .whenRequestMatches(_ => true) + .thenRespond(""" {"error": "invalid_token"}""", statusCode = StatusCode.Forbidden) + ) + val authConfig = AuthConfig("http://stub.local", "realm", "clientId", "clientSecret", None) + + val resourceService = new 
ResourceService(authConfig, testingBackend) + val error = the[HttpError[_]] thrownBy { + resourceService.clientToken().unwrap shouldBe PartyToken("E123456", 65, 0, None, "bearer") + } + + error should matchPattern { + case HttpError(_, StatusCode.Forbidden) => + } + } + + "existResource" should "return 200" in { + val testingBackend = new RecordingSttpBackend(Http4sBackend.stub[IO] + .whenRequestMatches(r => r.uri.path == Seq("realms", "realm", "protocol", "openid-connect", "token")) + .thenRespond(""" {"access_token": "E123456", "expires_in": 65, "refresh_expires_in": 0, "token_type" : "bearer"} """) + .whenRequestMatches(r => r.uri.path == Seq("realms", "realm", "authz", "protection", "resource_set", "F1") && r.method.method == "GET") + .thenRespond("", statusCode = StatusCode.Ok) + ) + val authConfig = AuthConfig("http://stub.local", "realm", "clientId", "clientSecret", None) + + val resourceService = new ResourceService(authConfig, testingBackend) + resourceService.existResource("F1").unwrap shouldBe StatusCode.Ok + } + + it should "return 404" in { + val testingBackend = new RecordingSttpBackend(Http4sBackend.stub[IO] + .whenRequestMatches(r => r.uri.path == Seq("realms", "realm", "protocol", "openid-connect", "token")) + .thenRespond(""" {"access_token": "E123456", "expires_in": 65, "refresh_expires_in": 0, "token_type" : "bearer"} """) + .whenRequestMatches(r => r.uri.path == Seq("realms", "realm", "authz", "protection", "resource_set", "F1")) + .thenRespond("", statusCode = StatusCode.NotFound) + ) + val authConfig = AuthConfig("http://stub.local", "realm", "clientId", "clientSecret", None) + + val resourceService = new ResourceService(authConfig, testingBackend) + resourceService.existResource("F1").unwrap shouldBe StatusCode.NotFound + } + + "getResourceById" should "return expected resouce" in { + val testingBackend = new RecordingSttpBackend(Http4sBackend.stub[IO] + .whenRequestMatches(r => r.uri.path == Seq("realms", "realm", "protocol", "openid-connect", "token")) + .thenRespond(""" {"access_token": "E123456", "expires_in": 65, "refresh_expires_in": 0, "token_type" : "bearer"} """) + .whenRequestMatches(r => r.uri.path == Seq("realms", "realm", "authz", "protection", "resource_set", "FI1")) + .thenRespond( + """ + |{ + | "name": "FI1", + | "type": "file", + | "owner": { + | "id": "c7259bdd-1650-48f4-a626-72e230e85793" + | }, + | "ownerManagedAccess": false, + | "displayName": "File Display name FI1", + | "attributes": { + | "checksum": [ + | "sha-256:bfbcf769ce225ba55ac3da3bbdd929a71291e16269425c4e83542b7937abf07e", + | "md5:c6a2db14d8a78a148e57733584e9865e" + | ], + | "created_time": [ + | "2023-09-22T08:55:22" + | ], + | "description": [ + | "Hello" + | ], + | "test": [ + | "World" + | ], + | "size": [ + | "1234" + | ] + | }, + | "_id": "ID_FI1", + | "uris": [ + | "s3://cqdg-file-dowbnload/FI1.csv" + | ], + | "resource_scopes": [ + | { + | "name": "ds1" + | } + | ], + | "scopes": [ + | { + | "name": "ds1" + | } + | ], + | "icon_uri": "" + |} + | """.stripMargin, statusCode = StatusCode.Ok) + ) + val authConfig = AuthConfig("http://stub.local", "realm", "clientId", "clientSecret", None) + + val resourceService = new ResourceService(authConfig, testingBackend) + resourceService.getResourceById("FI1").unwrap shouldBe ReadResource( + "ID_FI1", + "FI1", + Some("File Display name FI1"), + Some("file"), + Map("test" -> List("World"), "description" -> List("Hello"), "checksum" -> List("sha-256:bfbcf769ce225ba55ac3da3bbdd929a71291e16269425c4e83542b7937abf07e", 
"md5:c6a2db14d8a78a148e57733584e9865e"), "created_time" -> List("2023-09-22T08:55:22"), "size" -> List("1234")), + List("s3://cqdg-file-dowbnload/FI1.csv"), + Some(Seq(ResourceScope("ds1"))), + Some(Seq(ResourceScope("ds1"))) + ) + + + } + +}