diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 893be42f479..02a5d973e5a 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -12,7 +12,7 @@ concurrency: cancel-in-progress: true env: - COMPILER_IMAGE: stashapp/compiler:8 + COMPILER_IMAGE: stashapp/compiler:9 jobs: build: @@ -23,6 +23,11 @@ jobs: - name: Checkout run: git fetch --prune --unshallow --tags + - name: Setup Go + uses: actions/setup-go@v5 + with: + go-version-file: 'go.mod' + - name: Pull compiler image run: docker pull $COMPILER_IMAGE diff --git a/.github/workflows/golangci-lint.yml b/.github/workflows/golangci-lint.yml index b4100b8d066..e29d56c7999 100644 --- a/.github/workflows/golangci-lint.yml +++ b/.github/workflows/golangci-lint.yml @@ -9,7 +9,7 @@ on: pull_request: env: - COMPILER_IMAGE: stashapp/compiler:8 + COMPILER_IMAGE: stashapp/compiler:9 jobs: golangci: @@ -21,6 +21,11 @@ jobs: - name: Checkout run: git fetch --prune --unshallow --tags + - name: Setup Go + uses: actions/setup-go@v5 + with: + go-version-file: 'go.mod' + - name: Pull compiler image run: docker pull $COMPILER_IMAGE diff --git a/.golangci.yml b/.golangci.yml index 48ca4fd75a7..dc833ddbf17 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -15,11 +15,11 @@ linters: - unused # Linters added by the stash project. # - contextcheck + - copyloopvar - dogsled - errchkjson - errorlint # - exhaustive - - exportloopref - gocritic # - goerr113 - gofmt diff --git a/Makefile b/Makefile index 237f48557ab..088c1797d89 100644 --- a/Makefile +++ b/Makefile @@ -307,7 +307,8 @@ test: # runs all tests - including integration tests .PHONY: it it: - go test -tags=integration ./... + $(eval GO_BUILD_TAGS += integration) + go test -tags "$(GO_BUILD_TAGS)" ./... 
# generates test mocks .PHONY: generate-test-mocks @@ -371,6 +372,20 @@ fmt-ui: validate-ui: cd ui/v2.5 && yarn run validate +# these targets run the same steps as fmt-ui and validate-ui, but only on files that have changed +fmt-ui-quick: + cd ui/v2.5 && yarn run prettier --write $$(git diff --name-only --relative --diff-filter d . ../../graphql) + +# does not run tsc checks, as they are slow +validate-ui-quick: + cd ui/v2.5 && \ + tsfiles=$$(git diff --name-only --relative --diff-filter d src | grep -e "\.tsx\?\$$"); \ + scssfiles=$$(git diff --name-only --relative --diff-filter d src | grep "\.scss"); \ + prettyfiles=$$(git diff --name-only --relative --diff-filter d . ../../graphql); \ + if [ -n "$$tsfiles" ]; then yarn run eslint $$tsfiles; fi && \ + if [ -n "$$scssfiles" ]; then yarn run stylelint $$scssfiles; fi && \ + if [ -n "$$prettyfiles" ]; then yarn run prettier --check $$prettyfiles; fi + # runs all of the backend PR-acceptance steps .PHONY: validate-backend validate-backend: lint it diff --git a/README.md b/README.md index 27830b31bfa..8c35c134c86 100644 --- a/README.md +++ b/README.md @@ -57,10 +57,11 @@ Stash can pull metadata (performers, tags, descriptions, studios, and more) dire [StashDB](http://stashdb.org) is the canonical instance of our open source metadata API, [stash-box](https://github.com/stashapp/stash-box). # Translation -[![Translate](https://hosted.weblate.org/widget/stashapp/stash/svg-badge.svg)](https://hosted.weblate.org/engage/stashapp/) -🇧🇷 🇨🇳 🇩🇰 🇳🇱 🇬🇧 🇪🇪 🇫🇮 🇫🇷 🇩🇪 🇮🇹 🇯🇵 🇰🇷 🇵🇱 🇷🇺 🇪🇸 🇸🇪 🇹🇼 🇹🇷 +[![Translate](https://translate.codeberg.org/widget/stash/stash/svg-badge.svg)](https://translate.codeberg.org/engage/stash/) -Stash is available in 25 languages (so far!) and it could be in your language too. We use Weblate to coordinate community translations. 
If you want to help us translate Stash into your language, you can make an account at [Stash's Weblate](https://hosted.weblate.org/projects/stashapp/stash/) to get started contributing new languages or improving existing ones. Thanks! +Stash is available in 32 languages (so far!) and it could be in your language too. We use Weblate to coordinate community translations. If you want to help us translate Stash into your language, you can make an account at [Codeberg's Weblate](https://translate.codeberg.org/projects/stash/stash/) to get started contributing new languages or improving existing ones. Thanks! + +[![Translation status](https://translate.codeberg.org/widget/stash/stash/multi-auto.svg)](https://translate.codeberg.org/engage/stash/) # Support (FAQ) diff --git a/cmd/phasher/main.go b/cmd/phasher/main.go index d4bf7959007..86419563173 100644 --- a/cmd/phasher/main.go +++ b/cmd/phasher/main.go @@ -18,7 +18,7 @@ func customUsage() { flag.PrintDefaults() } -func printPhash(ff *ffmpeg.FFMpeg, ffp ffmpeg.FFProbe, inputfile string, quiet *bool) error { +func printPhash(ff *ffmpeg.FFMpeg, ffp *ffmpeg.FFProbe, inputfile string, quiet *bool) error { ffvideoFile, err := ffp.NewVideoFile(inputfile) if err != nil { return err @@ -80,7 +80,7 @@ func main() { ffmpegPath, ffprobePath := getPaths() encoder := ffmpeg.NewEncoder(ffmpegPath) // don't need to InitHWSupport, phashing doesn't use hw acceleration - ffprobe := ffmpeg.FFProbe(ffprobePath) + ffprobe := ffmpeg.NewFFProbe(ffprobePath) for _, item := range args { if err := printPhash(encoder, ffprobe, item, quiet); err != nil { diff --git a/docker/build/x86_64/Dockerfile b/docker/build/x86_64/Dockerfile index cf47278e31d..b945c9c4adc 100644 --- a/docker/build/x86_64/Dockerfile +++ b/docker/build/x86_64/Dockerfile @@ -16,7 +16,7 @@ ARG STASH_VERSION RUN BUILD_DATE=$(date +"%Y-%m-%d %H:%M:%S") make ui # Build Backend -FROM golang:1.19-alpine as backend +FROM golang:1.22-alpine as backend RUN apk add --no-cache make 
alpine-sdk WORKDIR /stash COPY ./go* ./*.go Makefile gqlgen.yml .gqlgenc.yml /stash/ diff --git a/docker/build/x86_64/Dockerfile-CUDA b/docker/build/x86_64/Dockerfile-CUDA index 53ebb60c03b..f76c6dea609 100644 --- a/docker/build/x86_64/Dockerfile-CUDA +++ b/docker/build/x86_64/Dockerfile-CUDA @@ -16,7 +16,7 @@ ARG STASH_VERSION RUN BUILD_DATE=$(date +"%Y-%m-%d %H:%M:%S") make ui # Build Backend -FROM golang:1.19-bullseye as backend +FROM golang:1.22-bullseye as backend RUN apt update && apt install -y build-essential golang WORKDIR /stash COPY ./go* ./*.go Makefile gqlgen.yml .gqlgenc.yml /stash/ diff --git a/docker/compiler/Dockerfile b/docker/compiler/Dockerfile index d543ca20c6e..d69cea3e34d 100644 --- a/docker/compiler/Dockerfile +++ b/docker/compiler/Dockerfile @@ -1,4 +1,4 @@ -FROM golang:1.19 +FROM golang:1.22 LABEL maintainer="https://discord.gg/2TsNFKt" diff --git a/docker/compiler/Makefile b/docker/compiler/Makefile index 2dd2f1e08f4..dbd9e16f89e 100644 --- a/docker/compiler/Makefile +++ b/docker/compiler/Makefile @@ -1,6 +1,6 @@ user=stashapp repo=compiler -version=8 +version=9 latest: docker build -t ${user}/${repo}:latest . diff --git a/docs/CONTRIBUTING.md b/docs/CONTRIBUTING.md index faf956d8f2b..5195dc34608 100644 --- a/docs/CONTRIBUTING.md +++ b/docs/CONTRIBUTING.md @@ -1,3 +1,24 @@ +## Goals and design vision + +The goal of stash is to be: +- an application for organising and viewing adult content - currently this is videos and images, in future this will be extended to include audio and text content + - organising includes scraping of metadata from websites and metadata repositories +- free and open-source +- portable and offline - can be run on a USB stick without needing to install dependencies (with the exception of ffmpeg) +- minimal, but highly extensible. 
The core feature set should be the minimum required to achieve the primary goal, while being extensible enough to extend via plugins +- easy to learn and use, with minimal technical knowledge required + +The core stash system is not intended for: +- managing downloading of content +- managing content on external websites +- publicly sharing content + +Other requirements: +- support as many video and image formats as possible +- interfaces with external systems (for example stash-box) should be made as generic as possible. + +Design considerations: +- features are easy to add and difficult to remove. Large superfluous features should be scrutinised and avoided where possible (eg DLNA, filename parser). Such features should be considered for third-party plugins instead. ## Technical Debt Please be sure to consider how heavily your contribution impacts the maintainability of the project long term, sometimes less is more. We don't want to merge collossal pull requests with hundreds of dependencies by a driveby contributor. diff --git a/docs/DEVELOPMENT.md b/docs/DEVELOPMENT.md index 03f6b6939fc..4a1cf30df04 100644 --- a/docs/DEVELOPMENT.md +++ b/docs/DEVELOPMENT.md @@ -4,7 +4,7 @@ * [Go](https://golang.org/dl/) * [GolangCI](https://golangci-lint.run/) - A meta-linter which runs several linters in parallel - * To install, follow the [local installation instructions](https://golangci-lint.run/usage/install/#local-installation) + * To install, follow the [local installation instructions](https://golangci-lint.run/welcome/install/#local-installation) * [Yarn](https://yarnpkg.com/en/docs/install) - Yarn package manager ## Environment @@ -69,6 +69,9 @@ NOTE: The `make` command in OpenBSD will be `gmake`.
For example, `make pre-ui` * `make it` - Runs all unit and integration tests * `make fmt` - Formats the Go source code * `make fmt-ui` - Formats the UI source code +* `make validate-ui` - Runs tests and checks for the UI only +* `make fmt-ui-quick` - (experimental) Formats only changed UI source code +* `make validate-ui-quick` - (experimental) Runs tests and checks of changed UI code * `make server-start` - Runs a development stash server in the `.local` directory * `make server-clean` - Removes the `.local` directory and all of its contents * `make ui-start` - Runs the UI in development mode. Requires a running Stash server to connect to - the server URL can be changed from the default of `http://localhost:9999` using the environment variable `VITE_APP_PLATFORM_URL`, but keep in mind that authentication cannot be used since the session authorization cookie cannot be sent cross-origin. The UI runs on port `3000` or the next available port. diff --git a/go.mod b/go.mod index c47641c222b..7f7d6170332 100644 --- a/go.mod +++ b/go.mod @@ -1,9 +1,9 @@ module github.com/stashapp/stash -go 1.19 +go 1.22 require ( - github.com/99designs/gqlgen v0.17.2 + github.com/99designs/gqlgen v0.17.49 github.com/WithoutPants/sortorder v0.0.0-20230616003020-921c9ef69552 github.com/Yamashou/gqlgenc v0.0.6 github.com/anacrolix/dms v1.2.2 @@ -15,46 +15,48 @@ require ( github.com/disintegration/imaging v1.6.2 github.com/dop251/goja v0.0.0-20231027120936-b396bb4c349d github.com/doug-martin/goqu/v9 v9.18.0 - github.com/go-chi/chi/v5 v5.0.10 + github.com/go-chi/chi/v5 v5.0.12 github.com/go-chi/cors v1.2.1 github.com/go-chi/httplog v0.3.1 github.com/go-toast/toast v0.0.0-20190211030409-01e6764cf0a4 - github.com/gofrs/uuid/v5 v5.0.0 + github.com/gofrs/uuid/v5 v5.1.0 github.com/golang-jwt/jwt/v4 v4.5.0 github.com/golang-migrate/migrate/v4 v4.16.2 github.com/gorilla/securecookie v1.1.1 github.com/gorilla/sessions v1.2.1 github.com/gorilla/websocket v1.5.0 - github.com/hashicorp/golang-lru/v2 
v2.0.6 + github.com/hashicorp/golang-lru/v2 v2.0.7 github.com/jinzhu/copier v0.4.0 - github.com/jmoiron/sqlx v1.3.5 + github.com/jmoiron/sqlx v1.4.0 github.com/json-iterator/go v1.1.12 github.com/kermieisinthehouse/gosx-notifier v0.1.2 github.com/kermieisinthehouse/systray v1.2.4 github.com/knadh/koanf v1.5.0 github.com/lucasb-eyer/go-colorful v1.2.0 - github.com/mattn/go-sqlite3 v1.14.17 + github.com/mattn/go-sqlite3 v1.14.22 + github.com/mitchellh/mapstructure v1.5.0 github.com/natefinch/pie v0.0.0-20170715172608-9a0d72014007 github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 github.com/remeh/sizedwaitgroup v1.0.0 + github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd github.com/shurcooL/graphql v0.0.0-20181231061246-d48a9a75455f github.com/sirupsen/logrus v1.9.3 - github.com/spf13/cast v1.5.1 + github.com/spf13/cast v1.6.0 github.com/spf13/pflag v1.0.5 - github.com/stretchr/testify v1.8.4 + github.com/stretchr/testify v1.9.0 github.com/tidwall/gjson v1.16.0 github.com/vearutop/statigz v1.4.0 github.com/vektah/dataloaden v0.3.0 - github.com/vektah/gqlparser/v2 v2.4.2 + github.com/vektah/gqlparser/v2 v2.5.16 github.com/vektra/mockery/v2 v2.10.0 github.com/xWTF/chardet v0.0.0-20230208095535-c780f2ac244e github.com/zencoder/go-dash/v3 v3.0.2 - golang.org/x/crypto v0.21.0 - golang.org/x/image v0.12.0 - golang.org/x/net v0.23.0 - golang.org/x/sys v0.18.0 - golang.org/x/term v0.18.0 - golang.org/x/text v0.14.0 + golang.org/x/crypto v0.24.0 + golang.org/x/image v0.18.0 + golang.org/x/net v0.26.0 + golang.org/x/sys v0.21.0 + golang.org/x/term v0.21.0 + golang.org/x/text v0.16.0 gopkg.in/guregu/null.v4 v4.0.0 gopkg.in/yaml.v2 v2.4.0 ) @@ -65,7 +67,7 @@ require ( github.com/asticode/go-astikit v0.20.0 // indirect github.com/asticode/go-astits v1.8.0 // indirect github.com/chromedp/sysutil v1.0.0 // indirect - github.com/cpuguy83/go-md2man/v2 v2.0.2 // indirect + github.com/cpuguy83/go-md2man/v2 v2.0.4 // indirect github.com/davecgh/go-spew v1.1.1 // 
indirect github.com/dlclark/regexp2 v1.7.0 // indirect github.com/fsnotify/fsnotify v1.6.0 // indirect @@ -75,20 +77,18 @@ require ( github.com/gobwas/ws v1.3.0 // indirect github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/google/pprof v0.0.0-20230207041349-798e818bf904 // indirect + github.com/google/uuid v1.6.0 // indirect github.com/hashicorp/errwrap v1.1.0 // indirect github.com/hashicorp/go-multierror v1.1.1 // indirect - github.com/hashicorp/golang-lru v0.5.4 // indirect github.com/hashicorp/hcl v1.0.0 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/josharian/intern v1.0.0 // indirect github.com/magiconair/properties v1.8.7 // indirect github.com/mailru/easyjson v0.7.7 // indirect - github.com/matryer/moq v0.2.3 // indirect github.com/mattn/go-colorable v0.1.13 // indirect - github.com/mattn/go-isatty v0.0.19 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect github.com/mitchellh/copystructure v1.2.0 // indirect github.com/mitchellh/go-homedir v1.1.0 // indirect - github.com/mitchellh/mapstructure v1.5.0 // indirect github.com/mitchellh/reflectwalk v1.0.2 // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.2 // indirect @@ -99,19 +99,21 @@ require ( github.com/pmezard/go-difflib v1.0.0 // indirect github.com/rs/zerolog v1.30.0 // indirect github.com/russross/blackfriday/v2 v2.1.0 // indirect + github.com/sosodev/duration v1.3.1 // indirect github.com/spf13/afero v1.9.5 // indirect github.com/spf13/cobra v1.7.0 // indirect github.com/spf13/jwalterweatherman v1.1.0 // indirect github.com/spf13/viper v1.16.0 // indirect - github.com/stretchr/objx v0.5.0 // indirect + github.com/stretchr/objx v0.5.2 // indirect github.com/subosito/gotenv v1.6.0 // indirect github.com/tidwall/match v1.1.1 // indirect github.com/tidwall/pretty v1.2.1 // indirect - github.com/urfave/cli/v2 v2.8.1 // indirect - 
github.com/xrash/smetrics v0.0.0-20201216005158-039620a65673 // indirect + github.com/urfave/cli/v2 v2.27.2 // indirect + github.com/xrash/smetrics v0.0.0-20240312152122-5f08fbb34913 // indirect go.uber.org/atomic v1.11.0 // indirect - golang.org/x/mod v0.12.0 // indirect - golang.org/x/tools v0.13.0 // indirect + golang.org/x/mod v0.18.0 // indirect + golang.org/x/sync v0.7.0 // indirect + golang.org/x/tools v0.22.0 // indirect gopkg.in/ini.v1 v1.67.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/go.sum b/go.sum index fc7a78ccc16..8c3b00d61f8 100644 --- a/go.sum +++ b/go.sum @@ -49,14 +49,19 @@ cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RX cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= -github.com/99designs/gqlgen v0.17.2 h1:yczvlwMsfcVu/JtejqfrLwXuSP0yZFhmcss3caEvHw8= +filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA= +filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4= github.com/99designs/gqlgen v0.17.2/go.mod h1:K5fzLKwtph+FFgh9j7nFbRUdBKvTcGnsta51fsMTn3o= +github.com/99designs/gqlgen v0.17.49 h1:b3hNGexHd33fBSAd4NDT/c3NCcQzcAVkknhN9ym36YQ= +github.com/99designs/gqlgen v0.17.49/go.mod h1:tC8YFVZMed81x7UJ7ORUwXF4Kn6SXuucFqQBhN8+BU0= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/DATA-DOG/go-sqlmock v1.5.0 h1:Shsta01QNfFxHCfpW6YH2STWB0MudeXXEWMr20OEh60= github.com/DATA-DOG/go-sqlmock v1.5.0/go.mod h1:f/Ixk793poVmq4qj/V1dPUg2JEAKC73Q5eFN3EC/SaM= github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod 
h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ= github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= +github.com/PuerkitoBio/goquery v1.9.2 h1:4/wZksC3KgkQw7SQgkKotmKljk0M6V8TUvA8Wb4yPeE= +github.com/PuerkitoBio/goquery v1.9.2/go.mod h1:GHPCaP0ODyyxqcNoFGYlAprUFH81NuRPd0GX3Zu2Mvk= github.com/RoaringBitmap/roaring v0.4.7/go.mod h1:8khRDP4HmeXns4xIj9oGrKSz7XTQiJx2zgh7AcNke4w= github.com/WithoutPants/sortorder v0.0.0-20230616003020-921c9ef69552 h1:eukVk+mGmbSZppLw8WJGpEUgMC570eb32y7FOsPW4Kc= github.com/WithoutPants/sortorder v0.0.0-20230616003020-921c9ef69552/go.mod h1:LKbO1i6L1lSlwWx4NHWVECxubHNKFz2YQoEMGXAFVy8= @@ -81,6 +86,9 @@ github.com/anacrolix/tagflag v0.0.0-20180109131632-2146c8d41bf0/go.mod h1:1m2U/K github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883 h1:bvNMNQO63//z+xNgfBlViaCIJKLlCJ6/fmUseuG0wVQ= github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8= github.com/andybalholm/brotli v1.0.5 h1:8uQZIdzKmjc/iuPu7O2ioW48L81FgatrcpfFmiq/cCs= +github.com/andybalholm/brotli v1.0.5/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig= +github.com/andybalholm/cascadia v1.3.2 h1:3Xi6Dw5lHF15JtdcmAHD3i1+T8plmv7BQ/nsViSLyss= +github.com/andybalholm/cascadia v1.3.2/go.mod h1:7gtRlve5FxPPgIgX36uWBX58OdBsSS6lUvCFb+h7KvU= github.com/antchfx/htmlquery v1.3.0 h1:5I5yNFOVI+egyia5F2s/5Do2nFWxJz41Tr3DyfKD25E= github.com/antchfx/htmlquery v1.3.0/go.mod h1:zKPDVTMhfOmcwxheXUsx4rKJy8KEY/PU6eXr/2SebQ8= github.com/antchfx/xpath v1.2.3 h1:CCZWOzv5bAqjVv0offZ2LVgVYFbeldKQVuLNbViZdes= @@ -114,6 +122,7 @@ github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+Ce github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= github.com/bool64/dev v0.2.28 h1:6ayDfrB/jnNr2iQAZHI+uT3Qi6rErSbJYQs1y8rSrwM= +github.com/bool64/dev v0.2.28/go.mod 
h1:iJbh1y/HkunEPhgebWRNcs8wfGq7sjvJ6W5iabL8ACg= github.com/bradfitz/iter v0.0.0-20140124041915-454541ec3da2/go.mod h1:PyRFw1Lt2wKX4ZVSQ2mk+PeDa1rxyObEDlApuIsUKuo= github.com/bradfitz/iter v0.0.0-20190303215204-33e6a9893b0c/go.mod h1:PyRFw1Lt2wKX4ZVSQ2mk+PeDa1rxyObEDlApuIsUKuo= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= @@ -154,8 +163,9 @@ github.com/corona10/goimagehash v1.1.0 h1:teNMX/1e+Wn/AYSbLHX8mj+mF9r60R1kBeqE9M github.com/corona10/goimagehash v1.1.0/go.mod h1:VkvE0mLn84L4aF8vCb6mafVajEb6QYMHl2ZJLn0mOGI= github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= -github.com/cpuguy83/go-md2man/v2 v2.0.2 h1:p1EgwI/C7NhT0JmVkwCD2ZBK8j4aeHQX2pMHHBfMQ6w= github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= +github.com/cpuguy83/go-md2man/v2 v2.0.4 h1:wfIWP927BUkWJb2NmU/kNDYIBTh/ziUX91+lVfRxZq4= +github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= @@ -193,7 +203,8 @@ github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5Kwzbycv github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU= github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M= -github.com/frankban/quicktest v1.14.4 h1:g2rn0vABPOOXmZUj+vbmUp0lPoXEMuhTpIluN0XL9UY= +github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8= +github.com/frankban/quicktest 
v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0= github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= github.com/fsnotify/fsnotify v1.5.1/go.mod h1:T3375wBYaZdLLcVNkcVbzGHY7f1l/uK5T5Ai1i3InKU= github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY= @@ -202,8 +213,8 @@ github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeME github.com/glycerine/go-unsnap-stream v0.0.0-20180323001048-9f0cb55181dd/go.mod h1:/20jfyN9Y5QPEAprSgKAUr+glWDY39ZiUEAYOEv5dsE= github.com/glycerine/goconvey v0.0.0-20180728074245-46e3a41ad493/go.mod h1:Ogl1Tioa0aV7gstGFO7KhffUsb9M4ydbEbbxpcEDc24= github.com/go-chi/chi/v5 v5.0.7/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8= -github.com/go-chi/chi/v5 v5.0.10 h1:rLz5avzKpjqxrYwXNfmjkrYYXOyLJd37pz53UFHC6vk= -github.com/go-chi/chi/v5 v5.0.10/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8= +github.com/go-chi/chi/v5 v5.0.12 h1:9euLV5sTrTNTRUU9POmDUvfxyj6LAABLUcEWO+JJb4s= +github.com/go-chi/chi/v5 v5.0.12/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8= github.com/go-chi/cors v1.2.1 h1:xEC8UT3Rlp2QuWNEr4Fs/c2EAGVKBwy/1vHx3bppil4= github.com/go-chi/cors v1.2.1/go.mod h1:sSbTewc+6wYHBBCW7ytsFSn836hqM7JxpglAy2Vzc58= github.com/go-chi/httplog v0.3.1 h1:uC3IUWCZagtbCinb3ypFh36SEcgd6StWw2Bu0XSXRtg= @@ -220,8 +231,9 @@ github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= github.com/go-sourcemap/sourcemap v2.1.3+incompatible h1:W1iEw64niKVGogNgBN3ePyLFfuisuzeidWPMPWmECqU= github.com/go-sourcemap/sourcemap v2.1.3+incompatible/go.mod h1:F8jJfvm2KbVjc5NqelyYJmf/v5J0dwNLS2mL4sNA1Jg= -github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE= github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= +github.com/go-sql-driver/mysql v1.8.1 
h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y= +github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= github.com/go-test/deep v1.0.2-0.20181118220953-042da051cf31/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= github.com/go-toast/toast v0.0.0-20190211030409-01e6764cf0a4 h1:qZNfIGkIANxGv/OqtnntR4DfOY2+BgwR60cAcu/i3SE= @@ -234,8 +246,8 @@ github.com/gobwas/ws v1.2.1/go.mod h1:hRKAFb8wOxFROYNsT1bqfWnhX+b5MFeJM9r2ZSwg/K github.com/gobwas/ws v1.3.0 h1:sbeU3Y4Qzlb+MOzIe6mQGf7QR4Hkv6ZD0qhGkBFL2O0= github.com/gobwas/ws v1.3.0/go.mod h1:hRKAFb8wOxFROYNsT1bqfWnhX+b5MFeJM9r2ZSwg/KY= github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= -github.com/gofrs/uuid/v5 v5.0.0 h1:p544++a97kEL+svbcFbCQVM9KFu0Yo25UoISXGNNH9M= -github.com/gofrs/uuid/v5 v5.0.0/go.mod h1:CDOjlDMVAtN56jqyRUZh58JT31Tiw7/oQyEXZV+9bD8= +github.com/gofrs/uuid/v5 v5.1.0 h1:S5rqVKIigghZTCBKPCw0Y+bXkn26K3TB5mvQq2Ix8dk= +github.com/gofrs/uuid/v5 v5.1.0/go.mod h1:CDOjlDMVAtN56jqyRUZh58JT31Tiw7/oQyEXZV+9bD8= github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/golang-jwt/jwt/v4 v4.5.0 h1:7cYmW1XlMY7h7ii7UhUyChSgS5wUJEnm9uZVTGqOWzg= @@ -294,7 +306,8 @@ github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= -github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= +github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= +github.com/google/go-cmp v0.6.0/go.mod 
h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= @@ -319,6 +332,8 @@ github.com/google/pprof v0.0.0-20230207041349-798e818bf904 h1:4/hN5RUoecvl+RmJRE github.com/google/pprof v0.0.0-20230207041349-798e818bf904/go.mod h1:uglQLonpP8qtYCYyzA+8c/9qtqgA3qsXGYqCPKARAFg= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= github.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0= @@ -368,10 +383,9 @@ github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/b github.com/hashicorp/go-version v1.1.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= -github.com/hashicorp/golang-lru v0.5.4 h1:YDjusn29QI/Das2iO9M0BHnIbxPeyuCHsjMW+lJfyTc= github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= -github.com/hashicorp/golang-lru/v2 v2.0.6 h1:3xi/Cafd1NaoEnS/yDssIiuVeDVywU0QdFGl3aQaQHM= -github.com/hashicorp/golang-lru/v2 v2.0.6/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM= +github.com/hashicorp/golang-lru/v2 v2.0.7 
h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k= +github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM= github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= @@ -399,8 +413,8 @@ github.com/jinzhu/copier v0.4.0 h1:w3ciUoD19shMCRargcpm0cm91ytaBhDvuRpz1ODO/U8= github.com/jinzhu/copier v0.4.0/go.mod h1:DfbEm0FYsaqBcKcFuvmOZb218JkPGtvSHsKg8S8hyyg= github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= -github.com/jmoiron/sqlx v1.3.5 h1:vFFPA71p1o5gAeqtEAwLU4dnX2napprKtHr7PYIcN3g= -github.com/jmoiron/sqlx v1.3.5/go.mod h1:nRVWtLre0KfCLJvgxzCsLVMogSvQ1zNJtpYr2Ccp0mQ= +github.com/jmoiron/sqlx v1.4.0 h1:1PLqN7S1UYp5t4SrVVnt4nUVNemrDAtxlulVe+Qgm3o= +github.com/jmoiron/sqlx v1.4.0/go.mod h1:ZrZ7UsYB/weZdl2Bxg6jCRO9c3YHl8r3ahlKmRT4JLY= github.com/joho/godotenv v1.3.0 h1:Zjp+RcGpHhGlrMbJzXTrZZPrWj+1vfm90La1wgB6Bhc= github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg= github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= @@ -435,15 +449,16 @@ github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfn github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 
h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/ledongthuc/pdf v0.0.0-20220302134840-0c2507a12d80 h1:6Yzfa6GP0rIo/kULo2bwGEkFvCePZ3qHDDTC3/J9Swo= github.com/ledongthuc/pdf v0.0.0-20220302134840-0c2507a12d80/go.mod h1:imJHygn/1yfhB7XSJJKlFZKl/J+dCPAknuiaGOshXAs= -github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= github.com/lib/pq v1.10.1/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= -github.com/lib/pq v1.10.2 h1:AqzbZs4ZoCBp+GtejcpCpcxM3zlSMx29dXbUSeVtJb8= +github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw= +github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/logrusorgru/aurora/v3 v3.0.0/go.mod h1:vsR12bk5grlLvLXAYrBsb5Oc/N+LxAlxggSjiwMnCUc= github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY= github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0= @@ -453,7 +468,6 @@ github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0V github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= -github.com/matryer/moq v0.2.3 h1:Q06vEqnBYjjfx5KKgHfYRKE/lvlRu+Nj+xodG4YdHnU= github.com/matryer/moq v0.2.3/go.mod h1:9RtPYjTnH1bSBIkpvtHkFN7nbWAnO7oRpdJkEIn6UtE= github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= @@ -469,12 +483,12 @@ github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOA github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= github.com/mattn/go-isatty v0.0.14/go.mod 
h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= -github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA= github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= -github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/mattn/go-sqlite3 v1.14.7/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= -github.com/mattn/go-sqlite3 v1.14.17 h1:mCRHCLDUBXgpKAqIKsaAaAsrAlbkeomtRFKXh2L6YIM= -github.com/mattn/go-sqlite3 v1.14.17/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg= +github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o4kU= +github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= github.com/miekg/dns v1.1.26/go.mod h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKjuso= @@ -565,6 +579,7 @@ github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6L github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= github.com/rogpeppe/go-internal v1.9.0 h1:73kH8U+JUqXU8lRuOHeVHaa/SZPifC7BkcraZVejAe8= +github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= github.com/rs/xid v1.3.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= github.com/rs/xid v1.4.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= github.com/rs/xid v1.5.0/go.mod 
h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= @@ -575,6 +590,8 @@ github.com/rs/zerolog v1.30.0/go.mod h1:/tk+P47gFdPXq4QYjvCmT5/Gsug2nagsFWBWhAiS github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd h1:CmH9+J6ZSsIjUK3dcGsnCnO41eRBOnY12zwkn5qVwgc= +github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd/go.mod h1:hPqNNc0+uJM6H+SuU8sEs5K5IQeKccPqeSjfgcKGgPk= github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= github.com/ryanuber/columnize v2.1.0+incompatible/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= github.com/ryanuber/go-glob v1.0.0/go.mod h1:807d1WSdnB0XRJzKNil9Om6lcp/3a0v4qIHxIXzX/Yc= @@ -582,8 +599,9 @@ github.com/ryszard/goskiplist v0.0.0-20150312221310-2dfbae5fcf46/go.mod h1:uAQ5P github.com/sagikazarmark/crypt v0.3.0/go.mod h1:uD/D+6UF4SrIR1uGEv7bBNkNqLGqUr43MRiaGWX1Nig= github.com/sagikazarmark/crypt v0.4.0/go.mod h1:ALv2SRj7GxYV4HO9elxH9nS6M9gW+xDNxqmyJ6RfDFM= github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= -github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0= github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= +github.com/sergi/go-diff v1.3.1 h1:xkr+Oxo4BOQKmkn/B9eMK0g5Kg/983T9DqqPHwYqD+8= +github.com/sergi/go-diff v1.3.1/go.mod h1:aMJSSKb2lpPvRNec0+w3fl7LP9IOFzdc9Pa4NFbPK1I= github.com/shurcooL/graphql v0.0.0-20181231061246-d48a9a75455f h1:tygelZueB1EtXkPI6mQ4o9DQ0+FKW41hTbunoXZCTqk= github.com/shurcooL/graphql v0.0.0-20181231061246-d48a9a75455f/go.mod h1:AuYgA5Kyo4c7HfUmvRGs/6rGlMMV/6B1bVnB9JxJEEg= github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod 
h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= @@ -594,6 +612,8 @@ github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= github.com/smartystreets/goconvey v0.0.0-20181108003508-044398e4856c/go.mod h1:XDJAKZRPZ1CvBcN2aX5YOUTYGHki24fSF0Iv48Ibg0s= +github.com/sosodev/duration v1.3.1 h1:qtHBDMQ6lvMQsL15g4aopM4HEfOaYuhWBw3NPTtlqq4= +github.com/sosodev/duration v1.3.1/go.mod h1:RQIBBX0+fMLc/D9+Jb/fwvVmo0eZvDDEERAikUR6SDg= github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= github.com/spf13/afero v1.3.3/go.mod h1:5KUK8ByomD5Ti5Artl0RtHeI5pTF7MIDuXL3yY520V4= github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I= @@ -601,8 +621,8 @@ github.com/spf13/afero v1.8.0/go.mod h1:CtAatgMJh6bJEIs48Ay/FOnkljP3WeGUG0MC1RfA github.com/spf13/afero v1.9.5 h1:stMpOSZFs//0Lv29HduCmli3GUfpFoF3Y1Q/aXj/wVM= github.com/spf13/afero v1.9.5/go.mod h1:UBogFpq8E9Hx+xc5CNTTEpTnuHVmXDwZcZcE1eb/UhQ= github.com/spf13/cast v1.4.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= -github.com/spf13/cast v1.5.1 h1:R+kOtfhWQE6TVQzY+4D7wJLBgkdVasCEFxSUBYBYIlA= -github.com/spf13/cast v1.5.1/go.mod h1:b9PdjNptOpzXr7Rq1q9gJML/2cdGQAo69NKzQ10KN48= +github.com/spf13/cast v1.6.0 h1:GEiTHELF+vaR5dhz3VqZfFSzZjYbgeKDpBxQVS4GYJ0= +github.com/spf13/cast v1.6.0/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo= github.com/spf13/cobra v1.3.0/go.mod h1:BrRVncBjOJa/eUcVVm9CE+oC6as8k+VYr4NY7WCi9V4= github.com/spf13/cobra v1.7.0 h1:hyqWnYt1ZQShIddO5kBpj3vu05/++x6tJ6dg8EC572I= github.com/spf13/cobra v1.7.0/go.mod h1:uLxZILRyS/50WlhOIKD7W6V5bgeIt+4sICxh6uRMrb0= @@ -617,8 +637,9 @@ github.com/spf13/viper v1.16.0/go.mod h1:yg78JgCJcbrQOvV9YLXgkLaZqUidkY9K+Dd1Fof github.com/stretchr/objx 
v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= -github.com/stretchr/objx v0.5.0 h1:1zr/of2m5FGMsad5YfcqgdqdWrIhu+EBEJRhR1U7z/c= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= github.com/stretchr/testify v1.2.1/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= @@ -628,8 +649,9 @@ github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/ github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= -github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= +github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8= github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU= @@ -643,23 +665,23 @@ github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhso github.com/tinylib/msgp v1.0.2/go.mod 
h1:+d+yLhGm8mzTaHzB+wgMYrodPfmZrzkirds8fDWklFE= github.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926/go.mod h1:9ESjWnEqriFuLhtthL60Sar/7RFoluCcXsuvEwTV5KM= github.com/urfave/cli/v2 v2.3.0/go.mod h1:LJmUH05zAU44vOAcrfzZQKsZbVcdbOG8rtL3/XcUArI= -github.com/urfave/cli/v2 v2.8.1 h1:CGuYNZF9IKZY/rfBe3lJpccSoIY1ytfvmgQT90cNOl4= -github.com/urfave/cli/v2 v2.8.1/go.mod h1:Z41J9TPoffeoqP0Iza0YbAhGvymRdZAd2uPmZ5JxRdY= +github.com/urfave/cli/v2 v2.27.2 h1:6e0H+AkS+zDckwPCUrZkKX38mRaau4nL2uipkJpbkcI= +github.com/urfave/cli/v2 v2.27.2/go.mod h1:g0+79LmHHATl7DAcHO99smiR/T7uGLw84w8Y42x+4eM= github.com/vearutop/statigz v1.4.0 h1:RQL0KG3j/uyA/PFpHeZ/L6l2ta920/MxlOAIGEOuwmU= github.com/vearutop/statigz v1.4.0/go.mod h1:LYTolBLiz9oJISwiVKnOQoIwhO1LWX1A7OECawGS8XE= github.com/vektah/dataloaden v0.3.0 h1:ZfVN2QD6swgvp+tDqdH/OIT/wu3Dhu0cus0k5gIZS84= github.com/vektah/dataloaden v0.3.0/go.mod h1:/HUdMve7rvxZma+2ZELQeNh88+003LL7Pf/CZ089j8U= github.com/vektah/gqlparser/v2 v2.4.0/go.mod h1:flJWIR04IMQPGz+BXLrORkrARBxv/rtyIAFvd/MceW0= github.com/vektah/gqlparser/v2 v2.4.1/go.mod h1:flJWIR04IMQPGz+BXLrORkrARBxv/rtyIAFvd/MceW0= -github.com/vektah/gqlparser/v2 v2.4.2 h1:29TGc6QmhEUq5fll+2FPoTmhUhR65WEKN4VK/jo0OlM= -github.com/vektah/gqlparser/v2 v2.4.2/go.mod h1:flJWIR04IMQPGz+BXLrORkrARBxv/rtyIAFvd/MceW0= +github.com/vektah/gqlparser/v2 v2.5.16 h1:1gcmLTvs3JLKXckwCwlUagVn/IlV2bwqle0vJ0vy5p8= +github.com/vektah/gqlparser/v2 v2.5.16/go.mod h1:1lz1OeCqgQbQepsGxPVywrjdBHW2T08PUS3pJqepRww= github.com/vektra/mockery/v2 v2.10.0 h1:MiiQWxwdq7/ET6dCXLaJzSGEN17k758H7JHS9kOdiks= github.com/vektra/mockery/v2 v2.10.0/go.mod h1:m/WO2UzWzqgVX3nvqpRQq70I4Z7jbSCRhdmkgtp+Ab4= github.com/willf/bitset v1.1.9/go.mod h1:RjeCKbqT1RxIR/KWY6phxZiaY1IyutSBfGjNPySAYV4= github.com/xWTF/chardet v0.0.0-20230208095535-c780f2ac244e h1:GruPsb+44XvYAzuAgJW1d1WHqmcI73L2XSjsbx/eJZw= github.com/xWTF/chardet v0.0.0-20230208095535-c780f2ac244e/go.mod h1:wA8kQ8WFipMciY9WcWzqQgZordm/P7l8IZdvx1crwmc= -github.com/xrash/smetrics 
v0.0.0-20201216005158-039620a65673 h1:bAn7/zixMGCfxrRTfdpNzjtPYqr8smhKouy9mxVdGPU= -github.com/xrash/smetrics v0.0.0-20201216005158-039620a65673/go.mod h1:N3UwUGtsrSj3ccvlPHLoLsHnpR27oXr4ZE984MbSER8= +github.com/xrash/smetrics v0.0.0-20240312152122-5f08fbb34913 h1:+qGGcbkzsfDQNPPe9UDgpxAWQrhbbBXOYJFQDq/dtJw= +github.com/xrash/smetrics v0.0.0-20240312152122-5f08fbb34913/go.mod h1:4aEEwZQutDLsQv2Deui4iYQ6DWTxR14g6m8Wv88+Xqk= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= @@ -706,8 +728,8 @@ golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5y golang.org/x/crypto v0.0.0-20211215165025-cf75a172585e/go.mod h1:P+XmwS30IXTQdn5tA2iutPOUgjI07+tq3H3K9MVA1s8= golang.org/x/crypto v0.0.0-20220112180741-5e0467b6c7ce/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.21.0 h1:X31++rzVUdKhX5sWmSOFZxx8UW/ldWx55cbf08iNAMA= -golang.org/x/crypto v0.21.0/go.mod h1:0BP7YvVV9gBbVKyeTG0Gyn+gZm94bibOW5BjDEYAOMs= +golang.org/x/crypto v0.24.0 h1:mnl8DM0o513X8fdIkmyFE/5hTYxbwYOjDS/+rK6qpRI= +golang.org/x/crypto v0.24.0/go.mod h1:Z1PMYSOR5nyMcyAVAIQSKCDwalqy85Aqn1x3Ws4L5DM= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -721,8 +743,8 @@ golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMk golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/image 
v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= -golang.org/x/image v0.12.0 h1:w13vZbU4o5rKOFFR8y7M+c4A5jXDC0uXTdHYRP8X2DQ= -golang.org/x/image v0.12.0/go.mod h1:Lu90jvHG7GfemOIcldsh9A2hS01ocl6oNO7ype5mEnk= +golang.org/x/image v0.18.0 h1:jGzIakQa/ZXI1I0Fxvaa9W7yP25TqT6cHIHn+6CqvSQ= +golang.org/x/image v0.18.0/go.mod h1:4yyo5vMFQjVjUcVk4jEQcU9MGy/rulF5WvUILseCM2E= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= @@ -750,9 +772,8 @@ golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro= golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3/go.mod h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= -golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= -golang.org/x/mod v0.12.0 h1:rmsUpXtvNzj340zd98LZ4KntptpfRHwpFOHG188oHXc= -golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.18.0 h1:5+9lSbEzPSdWkH32vYPBwEpX8KwDbM52Ud9xBUvNlb0= +golang.org/x/mod v0.18.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -802,9 +823,8 @@ golang.org/x/net 
v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qx golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.5.0/go.mod h1:DivGGAXEgPSlEBzxGzZI+ZLohi+xUj054jfeKui00ws= -golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= -golang.org/x/net v0.23.0 h1:7EYJ93RZ9vYSZAIb2x3lnuvqO5zneoD6IvWjuhfxjTs= -golang.org/x/net v0.23.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg= +golang.org/x/net v0.26.0 h1:soB7SVo0PWrY4vPW/+ay0jKDNScG2X9wFeYlXIvJsOQ= +golang.org/x/net v0.26.0/go.mod h1:5YKkiSynbBIh3p6iOc/vibscux0x38BZDkn8sCUPxHE= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -834,8 +854,8 @@ golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.3.0 h1:ftCYgMx6zT/asHUrPw8BLLscYtGznsLAnjq5RH9P66E= +golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M= +golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod 
h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -925,17 +945,15 @@ golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.18.0 h1:DBdB3niSjOA/O0blCZBqDefyWNYveAYMNF1Wum0DYQ4= -golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.21.0 h1:rF+pYz3DAGSQAxAu1CbC7catZg4ebC4UIeIhKxBZvws= +golang.org/x/sys v0.21.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.4.0/go.mod h1:9P2UbLfCdcvo3p/nzKvsmas4TnlujnuoV9hGgYzW1lQ= -golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= -golang.org/x/term v0.18.0 h1:FcHjZXDMxI8mM3nwhX9HlKop4C0YQvCVCdwYl2wOtE8= -golang.org/x/term v0.18.0/go.mod h1:ILwASektA3OnRv7amZ1xhE/KTR+u50pbXfZ03+6Nx58= +golang.org/x/term v0.21.0 h1:WVXCp+/EBEHOj53Rvu+7KiT/iElMrO8ACK16SMZ3jaA= +golang.org/x/term v0.21.0/go.mod h1:ooXLefLobQVslOqselCNF4SxFAaoS6KujMbsGzSDmX0= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text 
v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -948,10 +966,8 @@ golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= -golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= -golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= -golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= -golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.16.0 h1:a94ExnEXNtEwYLGJSIUxnWoxoRz/ZcCsV63ROupILh4= +golang.org/x/text v0.16.0/go.mod h1:GhwF1Be+LQoKShO3cGOHzqOgRrGaYc9AvblQOmPVHnI= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= @@ -1017,9 +1033,8 @@ golang.org/x/tools v0.1.8/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU= golang.org/x/tools v0.1.9/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU= golang.org/x/tools v0.1.10/go.mod h1:Uh6Zz+xoGYZom868N8YTex3t7RhtHDBrE8Gzo9bV56E= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= -golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= -golang.org/x/tools v0.13.0 h1:Iey4qkscZuv0VvIt8E0neZjtPVQFSc870HQ448QgEmQ= -golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58= +golang.org/x/tools v0.22.0 h1:gqSGLZqv+AI9lIQzniJ0nZDRG5GBPsSi+DRNHWNz6yA= +golang.org/x/tools v0.22.0/go.mod h1:aCwcsjqvq7Yqt6TNyX7QMU2enbQ/Gt0bo6krSeEri+c= 
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/gqlgen.yml b/gqlgen.yml index c6a434e25b7..9f22ccb49e2 100644 --- a/gqlgen.yml +++ b/gqlgen.yml @@ -51,6 +51,11 @@ models: fieldName: DurationFinite frame_rate: fieldName: FrameRateFinite + # movie is group under the hood + Movie: + model: github.com/stashapp/stash/pkg/models.Group + MovieFilterType: + model: github.com/stashapp/stash/pkg/models.GroupFilterType # autobind on config causes generation issues BlobsStorageType: model: github.com/stashapp/stash/internal/manager/config.BlobsStorageType diff --git a/graphql/schema/schema.graphql b/graphql/schema/schema.graphql index 5ec16b17b07..251c2af838c 100644 --- a/graphql/schema/schema.graphql +++ b/graphql/schema/schema.graphql @@ -4,6 +4,7 @@ type Query { findSavedFilter(id: ID!): SavedFilter findSavedFilters(mode: FilterMode): [SavedFilter!]! findDefaultFilter(mode: FilterMode!): SavedFilter + @deprecated(reason: "default filter now stored in UI config") "Find a scene by ID or Checksum" findScene(id: ID, checksum: String): Scene @@ -76,13 +77,22 @@ type Query { ): FindStudiosResultType! "Find a movie by ID" - findMovie(id: ID!): Movie + findMovie(id: ID!): Movie @deprecated(reason: "Use findGroup instead") "A function which queries Movie objects" findMovies( movie_filter: MovieFilterType filter: FindFilterType ids: [ID!] - ): FindMoviesResultType! + ): FindMoviesResultType! @deprecated(reason: "Use findGroups instead") + + "Find a group by ID" + findGroup(id: ID!): Group + "A function which queries Group objects" + findGroups( + group_filter: GroupFilterType + filter: FindFilterType + ids: [ID!] + ): FindGroupsResultType! 
findGallery(id: ID!): Gallery findGalleries( @@ -155,7 +165,13 @@ type Query { scrapeSingleMovie( source: ScraperSourceInput! input: ScrapeSingleMovieInput! - ): [ScrapedMovie!]! + ): [ScrapedMovie!]! @deprecated(reason: "Use scrapeSingleGroup instead") + + "Scrape for a single group" + scrapeSingleGroup( + source: ScraperSourceInput! + input: ScrapeSingleGroupInput! + ): [ScrapedGroup!]! "Scrapes content based on a URL" scrapeURL(url: String!, ty: ScrapeContentType!): ScrapedContent @@ -168,6 +184,9 @@ type Query { scrapeGalleryURL(url: String!): ScrapedGallery "Scrapes a complete movie record based on a URL" scrapeMovieURL(url: String!): ScrapedMovie + @deprecated(reason: "Use scrapeGroupURL instead") + "Scrapes a complete group record based on a URL" + scrapeGroupURL(url: String!): ScrapedGroup # Plugins "List loaded plugins" @@ -213,7 +232,7 @@ type Query { allPerformers: [Performer!]! allTags: [Tag!]! @deprecated(reason: "Use findTags instead") allStudios: [Studio!]! @deprecated(reason: "Use findStudios instead") - allMovies: [Movie!]! @deprecated(reason: "Use findMovies instead") + allMovies: [Movie!]! @deprecated(reason: "Use findGroups instead") # Get everything with minimal metadata @@ -257,6 +276,13 @@ type Mutation { "Sets the resume time point (if provided) and adds the provided duration to the scene's play duration" sceneSaveActivity(id: ID!, resume_time: Float, playDuration: Float): Boolean! + "Resets the resume time point and play duration" + sceneResetActivity( + id: ID! + reset_resume: Boolean + reset_duration: Boolean + ): Boolean! + "Increments the play count for the scene. Returns the new play count value." sceneIncrementPlayCount(id: ID!): Int! @deprecated(reason: "Use sceneAddPlay instead") @@ -298,6 +324,8 @@ type Mutation { addGalleryImages(input: GalleryAddInput!): Boolean! removeGalleryImages(input: GalleryRemoveInput!): Boolean! + setGalleryCover(input: GallerySetCoverInput!): Boolean! 
+ resetGalleryCover(input: GalleryResetCoverInput!): Boolean! galleryChapterCreate(input: GalleryChapterCreateInput!): GalleryChapter galleryChapterUpdate(input: GalleryChapterUpdateInput!): GalleryChapter @@ -315,16 +343,34 @@ type Mutation { studiosDestroy(ids: [ID!]!): Boolean! movieCreate(input: MovieCreateInput!): Movie + @deprecated(reason: "Use groupCreate instead") movieUpdate(input: MovieUpdateInput!): Movie + @deprecated(reason: "Use groupUpdate instead") movieDestroy(input: MovieDestroyInput!): Boolean! + @deprecated(reason: "Use groupDestroy instead") moviesDestroy(ids: [ID!]!): Boolean! + @deprecated(reason: "Use groupsDestroy instead") bulkMovieUpdate(input: BulkMovieUpdateInput!): [Movie!] + @deprecated(reason: "Use bulkGroupUpdate instead") + + groupCreate(input: GroupCreateInput!): Group + groupUpdate(input: GroupUpdateInput!): Group + groupDestroy(input: GroupDestroyInput!): Boolean! + groupsDestroy(ids: [ID!]!): Boolean! + bulkGroupUpdate(input: BulkGroupUpdateInput!): [Group!] + + addGroupSubGroups(input: GroupSubGroupAddInput!): Boolean! + removeGroupSubGroups(input: GroupSubGroupRemoveInput!): Boolean! + + "Reorder sub groups within a group. Returns true if successful." + reorderSubGroups(input: ReorderSubGroupsInput!): Boolean! tagCreate(input: TagCreateInput!): Tag tagUpdate(input: TagUpdateInput!): Tag tagDestroy(input: TagDestroyInput!): Boolean! tagsDestroy(ids: [ID!]!): Boolean! tagsMerge(input: TagsMergeInput!): Tag + bulkTagUpdate(input: BulkTagUpdateInput!): [Tag!] """ Moves the given files to the given destination. Returns true if successful. @@ -344,6 +390,7 @@ type Mutation { saveFilter(input: SaveFilterInput!): SavedFilter! destroySavedFilter(input: DestroyFilterInput!): Boolean! setDefaultFilter(input: SetDefaultFilterInput!): Boolean! + @deprecated(reason: "now uses UI config") "Change general configuration options" configureGeneral(input: ConfigGeneralInput!): ConfigGeneralResult! 
diff --git a/graphql/schema/types/filters.graphql b/graphql/schema/types/filters.graphql index 5d5209006da..f0f84efda8c 100644 --- a/graphql/schema/types/filters.graphql +++ b/graphql/schema/types/filters.graphql @@ -170,6 +170,14 @@ input PerformerFilterType { birthdate: DateCriterionInput "Filter by death date" death_date: DateCriterionInput + "Filter by related scenes that meet this criteria" + scenes_filter: SceneFilterType + "Filter by related images that meet this criteria" + images_filter: ImageFilterType + "Filter by related galleries that meet this criteria" + galleries_filter: GalleryFilterType + "Filter by related tags that meet this criteria" + tags_filter: TagFilterType "Filter by creation time" created_at: TimestampCriterionInput "Filter by last update time" @@ -183,6 +191,8 @@ input SceneMarkerFilterType { scene_tags: HierarchicalMultiCriterionInput "Filter to only include scene markers with these performers" performers: MultiCriterionInput + "Filter to only include scene markers from these scenes" + scenes: MultiCriterionInput "Filter by creation time" created_at: TimestampCriterionInput "Filter by last update time" @@ -193,6 +203,8 @@ input SceneMarkerFilterType { scene_created_at: TimestampCriterionInput "Filter by lscene ast update time" scene_updated_at: TimestampCriterionInput + "Filter by related scenes that meet this criteria" + scene_filter: SceneFilterType } input SceneFilterType { @@ -247,7 +259,9 @@ input SceneFilterType { "Filter to only include scenes with this studio" studios: HierarchicalMultiCriterionInput "Filter to only include scenes with this movie" - movies: MultiCriterionInput + movies: MultiCriterionInput @deprecated(reason: "use groups instead") + "Filter to only include scenes with this group" + groups: HierarchicalMultiCriterionInput "Filter to only include scenes with this gallery" galleries: MultiCriterionInput "Filter to only include scenes with these tags" @@ -288,9 +302,29 @@ input SceneFilterType { created_at: 
TimestampCriterionInput "Filter by last update time" updated_at: TimestampCriterionInput + + "Filter by related galleries that meet this criteria" + galleries_filter: GalleryFilterType + "Filter by related performers that meet this criteria" + performers_filter: PerformerFilterType + "Filter by related studios that meet this criteria" + studios_filter: StudioFilterType + "Filter by related tags that meet this criteria" + tags_filter: TagFilterType + "Filter by related movies that meet this criteria" + movies_filter: MovieFilterType + @deprecated(reason: "use groups_filter instead") + "Filter by related groups that meet this criteria" + groups_filter: GroupFilterType + "Filter by related markers that meet this criteria" + markers_filter: SceneMarkerFilterType } input MovieFilterType { + AND: MovieFilterType + OR: MovieFilterType + NOT: MovieFilterType + name: StringCriterionInput director: StringCriterionInput synopsis: StringCriterionInput @@ -307,12 +341,68 @@ input MovieFilterType { url: StringCriterionInput "Filter to only include movies where performer appears in a scene" performers: MultiCriterionInput + "Filter to only include movies with these tags" + tags: HierarchicalMultiCriterionInput + "Filter by tag count" + tag_count: IntCriterionInput "Filter by date" date: DateCriterionInput "Filter by creation time" created_at: TimestampCriterionInput "Filter by last update time" updated_at: TimestampCriterionInput + + "Filter by related scenes that meet this criteria" + scenes_filter: SceneFilterType + "Filter by related studios that meet this criteria" + studios_filter: StudioFilterType +} + +input GroupFilterType { + AND: GroupFilterType + OR: GroupFilterType + NOT: GroupFilterType + + name: StringCriterionInput + director: StringCriterionInput + synopsis: StringCriterionInput + + "Filter by duration (in seconds)" + duration: IntCriterionInput + # rating expressed as 1-100 + rating100: IntCriterionInput + "Filter to only include groups with this studio" + 
studios: HierarchicalMultiCriterionInput + "Filter to only include groups missing this property" + is_missing: String + "Filter by url" + url: StringCriterionInput + "Filter to only include groups where performer appears in a scene" + performers: MultiCriterionInput + "Filter to only include groups with these tags" + tags: HierarchicalMultiCriterionInput + "Filter by tag count" + tag_count: IntCriterionInput + "Filter by date" + date: DateCriterionInput + "Filter by creation time" + created_at: TimestampCriterionInput + "Filter by last update time" + updated_at: TimestampCriterionInput + + "Filter by containing groups" + containing_groups: HierarchicalMultiCriterionInput + "Filter by sub groups" + sub_groups: HierarchicalMultiCriterionInput + "Filter by number of containing groups the group has" + containing_group_count: IntCriterionInput + "Filter by number of sub-groups the group has" + sub_group_count: IntCriterionInput + + "Filter by related scenes that meet this criteria" + scenes_filter: SceneFilterType + "Filter by related studios that meet this criteria" + studios_filter: StudioFilterType } input StudioFilterType { @@ -326,6 +416,8 @@ input StudioFilterType { parents: MultiCriterionInput "Filter by StashID" stash_id_endpoint: StashIDCriterionInput + "Filter to only include studios with these tags" + tags: HierarchicalMultiCriterionInput "Filter to only include studios missing this property" is_missing: String # rating expressed as 1-100 @@ -338,6 +430,8 @@ input StudioFilterType { image_count: IntCriterionInput "Filter by gallery count" gallery_count: IntCriterionInput + "Filter by tag count" + tag_count: IntCriterionInput "Filter by url" url: StringCriterionInput "Filter by studio aliases" @@ -346,6 +440,12 @@ input StudioFilterType { child_count: IntCriterionInput "Filter by autotag ignore value" ignore_auto_tag: Boolean + "Filter by related scenes that meet this criteria" + scenes_filter: SceneFilterType + "Filter by related images that meet this 
criteria" + images_filter: ImageFilterType + "Filter by related galleries that meet this criteria" + galleries_filter: GalleryFilterType "Filter by creation time" created_at: TimestampCriterionInput "Filter by last update time" @@ -411,6 +511,17 @@ input GalleryFilterType { code: StringCriterionInput "Filter by photographer" photographer: StringCriterionInput + + "Filter by related scenes that meet this criteria" + scenes_filter: SceneFilterType + "Filter by related images that meet this criteria" + images_filter: ImageFilterType + "Filter by related performers that meet this criteria" + performers_filter: PerformerFilterType + "Filter by related studios that meet this criteria" + studios_filter: StudioFilterType + "Filter by related tags that meet this criteria" + tags_filter: TagFilterType } input TagFilterType { @@ -445,6 +556,15 @@ input TagFilterType { "Filter by number of performers with this tag" performer_count: IntCriterionInput + "Filter by number of studios with this tag" + studio_count: IntCriterionInput + + "Filter by number of movies with this tag" + movie_count: IntCriterionInput + + "Filter by number of group with this tag" + group_count: IntCriterionInput + "Filter by number of markers with this tag" marker_count: IntCriterionInput @@ -463,6 +583,13 @@ input TagFilterType { "Filter by autotag ignore value" ignore_auto_tag: Boolean + "Filter by related scenes that meet this criteria" + scenes_filter: SceneFilterType + "Filter by related images that meet this criteria" + images_filter: ImageFilterType + "Filter by related galleries that meet this criteria" + galleries_filter: GalleryFilterType + "Filter by creation time" created_at: TimestampCriterionInput @@ -528,6 +655,15 @@ input ImageFilterType { code: StringCriterionInput "Filter by photographer" photographer: StringCriterionInput + + "Filter by related galleries that meet this criteria" + galleries_filter: GalleryFilterType + "Filter by related performers that meet this criteria" + 
performers_filter: PerformerFilterType + "Filter by related studios that meet this criteria" + studios_filter: StudioFilterType + "Filter by related tags that meet this criteria" + tags_filter: TagFilterType } enum CriterionModifier { @@ -623,6 +759,7 @@ enum FilterMode { GALLERIES SCENE_MARKERS MOVIES + GROUPS TAGS IMAGES } diff --git a/graphql/schema/types/gallery.graphql b/graphql/schema/types/gallery.graphql index 47f6c7d7eee..999a743f762 100644 --- a/graphql/schema/types/gallery.graphql +++ b/graphql/schema/types/gallery.graphql @@ -1,3 +1,8 @@ +type GalleryPathsType { + cover: String! + preview: String! # Resolver +} + "Gallery type" type Gallery { id: ID! @@ -25,6 +30,9 @@ type Gallery { performers: [Performer!]! cover: Image + + paths: GalleryPathsType! # Resolver + image(index: Int!): Image! } input GalleryCreateInput { @@ -108,3 +116,12 @@ input GalleryRemoveInput { gallery_id: ID! image_ids: [ID!]! } + +input GallerySetCoverInput { + gallery_id: ID! + cover_image_id: ID! +} + +input GalleryResetCoverInput { + gallery_id: ID! +} diff --git a/graphql/schema/types/group.graphql b/graphql/schema/types/group.graphql new file mode 100644 index 00000000000..b42e4fd1fef --- /dev/null +++ b/graphql/schema/types/group.graphql @@ -0,0 +1,137 @@ +"GroupDescription represents a relationship to a group with a description of the relationship" +type GroupDescription { + group: Group! + description: String +} + +type Group { + id: ID! + name: String! + aliases: String + "Duration in seconds" + duration: Int + date: String + # rating expressed as 1-100 + rating100: Int + studio: Studio + director: String + synopsis: String + urls: [String!]! + tags: [Tag!]! + created_at: Time! + updated_at: Time! + + containing_groups: [GroupDescription!]! + sub_groups: [GroupDescription!]! + + front_image_path: String # Resolver + back_image_path: String # Resolver + scene_count(depth: Int): Int! # Resolver + sub_group_count(depth: Int): Int! # Resolver + scenes: [Scene!]! 
+} + +input GroupDescriptionInput { + group_id: ID! + description: String +} + +input GroupCreateInput { + name: String! + aliases: String + "Duration in seconds" + duration: Int + date: String + # rating expressed as 1-100 + rating100: Int + studio_id: ID + director: String + synopsis: String + urls: [String!] + tag_ids: [ID!] + + containing_groups: [GroupDescriptionInput!] + sub_groups: [GroupDescriptionInput!] + + "This should be a URL or a base64 encoded data URL" + front_image: String + "This should be a URL or a base64 encoded data URL" + back_image: String +} + +input GroupUpdateInput { + id: ID! + name: String + aliases: String + duration: Int + date: String + # rating expressed as 1-100 + rating100: Int + studio_id: ID + director: String + synopsis: String + urls: [String!] + tag_ids: [ID!] + + containing_groups: [GroupDescriptionInput!] + sub_groups: [GroupDescriptionInput!] + + "This should be a URL or a base64 encoded data URL" + front_image: String + "This should be a URL or a base64 encoded data URL" + back_image: String +} + +input BulkUpdateGroupDescriptionsInput { + groups: [GroupDescriptionInput!]! + mode: BulkUpdateIdMode! +} + +input BulkGroupUpdateInput { + clientMutationId: String + ids: [ID!] + # rating expressed as 1-100 + rating100: Int + studio_id: ID + director: String + urls: BulkUpdateStrings + tag_ids: BulkUpdateIds + + containing_groups: BulkUpdateGroupDescriptionsInput + sub_groups: BulkUpdateGroupDescriptionsInput +} + +input GroupDestroyInput { + id: ID! +} + +input ReorderSubGroupsInput { + "ID of the group to reorder sub groups for" + group_id: ID! + """ + IDs of the sub groups to reorder. These must be a subset of the current sub groups. + Sub groups will be inserted in this order at the insert_index + """ + sub_group_ids: [ID!]! + "The sub-group ID at which to insert the sub groups" + insert_at_id: ID! 
+ "If true, the sub groups will be inserted after the insert_index, otherwise they will be inserted before" + insert_after: Boolean +} + +type FindGroupsResultType { + count: Int! + groups: [Group!]! +} + +input GroupSubGroupAddInput { + containing_group_id: ID! + sub_groups: [GroupDescriptionInput!]! + "The index at which to insert the sub groups. If not provided, the sub groups will be appended to the end" + insert_index: Int +} + +input GroupSubGroupRemoveInput { + containing_group_id: ID! + sub_group_ids: [ID!]! +} diff --git a/graphql/schema/types/metadata.graphql b/graphql/schema/types/metadata.graphql index 3221b0cc68d..38c910d369c 100644 --- a/graphql/schema/types/metadata.graphql +++ b/graphql/schema/types/metadata.graphql @@ -284,7 +284,8 @@ input ExportObjectsInput { studios: ExportObjectTypeInput performers: ExportObjectTypeInput tags: ExportObjectTypeInput - movies: ExportObjectTypeInput + groups: ExportObjectTypeInput + movies: ExportObjectTypeInput @deprecated(reason: "Use groups instead") galleries: ExportObjectTypeInput includeDependencies: Boolean } diff --git a/graphql/schema/types/movie.graphql b/graphql/schema/types/movie.graphql index 1a52c91ea27..845827b3f17 100644 --- a/graphql/schema/types/movie.graphql +++ b/graphql/schema/types/movie.graphql @@ -10,13 +10,15 @@ type Movie { studio: Studio director: String synopsis: String - url: String + url: String @deprecated(reason: "Use urls") + urls: [String!]! + tags: [Tag!]! created_at: Time! updated_at: Time! front_image_path: String # Resolver back_image_path: String # Resolver - scene_count: Int! # Resolver + scene_count(depth: Int): Int! # Resolver scenes: [Scene!]! } @@ -31,7 +33,9 @@ input MovieCreateInput { studio_id: ID director: String synopsis: String - url: String + url: String @deprecated(reason: "Use urls") + urls: [String!] + tag_ids: [ID!] 
"This should be a URL or a base64 encoded data URL" front_image: String "This should be a URL or a base64 encoded data URL" @@ -49,7 +53,9 @@ input MovieUpdateInput { studio_id: ID director: String synopsis: String - url: String + url: String @deprecated(reason: "Use urls") + urls: [String!] + tag_ids: [ID!] "This should be a URL or a base64 encoded data URL" front_image: String "This should be a URL or a base64 encoded data URL" @@ -63,6 +69,8 @@ input BulkMovieUpdateInput { rating100: Int studio_id: ID director: String + urls: BulkUpdateStrings + tag_ids: BulkUpdateIds } input MovieDestroyInput { diff --git a/graphql/schema/types/performer.graphql b/graphql/schema/types/performer.graphql index c5d32842513..d6f3dd832c4 100644 --- a/graphql/schema/types/performer.graphql +++ b/graphql/schema/types/performer.graphql @@ -16,10 +16,11 @@ type Performer { id: ID! name: String! disambiguation: String - url: String + url: String @deprecated(reason: "Use urls") + urls: [String!] gender: GenderEnum - twitter: String - instagram: String + twitter: String @deprecated(reason: "Use urls") + instagram: String @deprecated(reason: "Use urls") birthdate: String ethnicity: String country: String @@ -41,7 +42,8 @@ type Performer { scene_count: Int! # Resolver image_count: Int! # Resolver gallery_count: Int! # Resolver - movie_count: Int! # Resolver + group_count: Int! # Resolver + movie_count: Int! @deprecated(reason: "use group_count instead") # Resolver performer_count: Int! # Resolver o_counter: Int # Resolver scenes: [Scene!]! @@ -54,13 +56,15 @@ type Performer { weight: Int created_at: Time! updated_at: Time! - movies: [Movie!]! + groups: [Group!]! + movies: [Movie!]! @deprecated(reason: "use groups instead") } input PerformerCreateInput { name: String! disambiguation: String - url: String + url: String @deprecated(reason: "Use urls") + urls: [String!] 
gender: GenderEnum birthdate: String ethnicity: String @@ -75,8 +79,8 @@ input PerformerCreateInput { tattoos: String piercings: String alias_list: [String!] - twitter: String - instagram: String + twitter: String @deprecated(reason: "Use urls") + instagram: String @deprecated(reason: "Use urls") favorite: Boolean tag_ids: [ID!] "This should be a URL or a base64 encoded data URL" @@ -95,7 +99,8 @@ input PerformerUpdateInput { id: ID! name: String disambiguation: String - url: String + url: String @deprecated(reason: "Use urls") + urls: [String!] gender: GenderEnum birthdate: String ethnicity: String @@ -110,8 +115,8 @@ input PerformerUpdateInput { tattoos: String piercings: String alias_list: [String!] - twitter: String - instagram: String + twitter: String @deprecated(reason: "Use urls") + instagram: String @deprecated(reason: "Use urls") favorite: Boolean tag_ids: [ID!] "This should be a URL or a base64 encoded data URL" @@ -135,7 +140,8 @@ input BulkPerformerUpdateInput { clientMutationId: String ids: [ID!] disambiguation: String - url: String + url: String @deprecated(reason: "Use urls") + urls: BulkUpdateStrings gender: GenderEnum birthdate: String ethnicity: String @@ -150,8 +156,8 @@ input BulkPerformerUpdateInput { tattoos: String piercings: String alias_list: BulkUpdateStrings - twitter: String - instagram: String + twitter: String @deprecated(reason: "Use urls") + instagram: String @deprecated(reason: "Use urls") favorite: Boolean tag_ids: BulkUpdateIds # rating expressed as 1-100 diff --git a/graphql/schema/types/scene.graphql b/graphql/schema/types/scene.graphql index a5bb9f9057d..eca01d15ed8 100644 --- a/graphql/schema/types/scene.graphql +++ b/graphql/schema/types/scene.graphql @@ -26,6 +26,11 @@ type SceneMovie { scene_index: Int } +type SceneGroup { + group: Group! + scene_index: Int +} + type VideoCaption { language_code: String! caption_type: String! @@ -68,7 +73,8 @@ type Scene { scene_markers: [SceneMarker!]! galleries: [Gallery!]! 
studio: Studio - movies: [SceneMovie!]! + groups: [SceneGroup!]! + movies: [SceneMovie!]! @deprecated(reason: "Use groups") tags: [Tag!]! performers: [Performer!]! stash_ids: [StashID!]! @@ -82,6 +88,11 @@ input SceneMovieInput { scene_index: Int } +input SceneGroupInput { + group_id: ID! + scene_index: Int +} + input SceneCreateInput { title: String code: String @@ -96,7 +107,8 @@ input SceneCreateInput { studio_id: ID gallery_ids: [ID!] performer_ids: [ID!] - movies: [SceneMovieInput!] + groups: [SceneGroupInput!] + movies: [SceneMovieInput!] @deprecated(reason: "Use groups") tag_ids: [ID!] "This should be a URL or a base64 encoded data URL" cover_image: String @@ -128,7 +140,8 @@ input SceneUpdateInput { studio_id: ID gallery_ids: [ID!] performer_ids: [ID!] - movies: [SceneMovieInput!] + groups: [SceneGroupInput!] + movies: [SceneMovieInput!] @deprecated(reason: "Use groups") tag_ids: [ID!] "This should be a URL or a base64 encoded data URL" cover_image: String @@ -175,7 +188,8 @@ input BulkSceneUpdateInput { gallery_ids: BulkUpdateIds performer_ids: BulkUpdateIds tag_ids: BulkUpdateIds - movie_ids: BulkUpdateIds + group_ids: BulkUpdateIds + movie_ids: BulkUpdateIds @deprecated(reason: "Use group_ids") } input SceneDestroyInput { diff --git a/graphql/schema/types/scraped-group.graphql b/graphql/schema/types/scraped-group.graphql new file mode 100644 index 00000000000..e490f32bbb4 --- /dev/null +++ b/graphql/schema/types/scraped-group.graphql @@ -0,0 +1,65 @@ +"A movie from a scraping operation..." +type ScrapedMovie { + stored_id: ID + name: String + aliases: String + duration: String + date: String + rating: String + director: String + url: String @deprecated(reason: "use urls") + urls: [String!] + synopsis: String + studio: ScrapedStudio + tags: [ScrapedTag!] 
+ + "This should be a base64 encoded data URL" + front_image: String + "This should be a base64 encoded data URL" + back_image: String +} + +input ScrapedMovieInput { + name: String + aliases: String + duration: String + date: String + rating: String + director: String + url: String @deprecated(reason: "use urls") + urls: [String!] + synopsis: String + # not including tags for the input +} + +"A group from a scraping operation..." +type ScrapedGroup { + stored_id: ID + name: String + aliases: String + duration: String + date: String + rating: String + director: String + urls: [String!] + synopsis: String + studio: ScrapedStudio + tags: [ScrapedTag!] + + "This should be a base64 encoded data URL" + front_image: String + "This should be a base64 encoded data URL" + back_image: String +} + +input ScrapedGroupInput { + name: String + aliases: String + duration: String + date: String + rating: String + director: String + urls: [String!] + synopsis: String + # not including tags for the input +} diff --git a/graphql/schema/types/scraped-movie.graphql b/graphql/schema/types/scraped-movie.graphql deleted file mode 100644 index e3110b8e178..00000000000 --- a/graphql/schema/types/scraped-movie.graphql +++ /dev/null @@ -1,29 +0,0 @@ -"A movie from a scraping operation..." 
-type ScrapedMovie { - stored_id: ID - name: String - aliases: String - duration: String - date: String - rating: String - director: String - url: String - synopsis: String - studio: ScrapedStudio - - "This should be a base64 encoded data URL" - front_image: String - "This should be a base64 encoded data URL" - back_image: String -} - -input ScrapedMovieInput { - name: String - aliases: String - duration: String - date: String - rating: String - director: String - url: String - synopsis: String -} diff --git a/graphql/schema/types/scraped-performer.graphql b/graphql/schema/types/scraped-performer.graphql index 92ba94d325d..487c89516de 100644 --- a/graphql/schema/types/scraped-performer.graphql +++ b/graphql/schema/types/scraped-performer.graphql @@ -5,9 +5,10 @@ type ScrapedPerformer { name: String disambiguation: String gender: String - url: String - twitter: String - instagram: String + url: String @deprecated(reason: "use urls") + urls: [String!] + twitter: String @deprecated(reason: "use urls") + instagram: String @deprecated(reason: "use urls") birthdate: String ethnicity: String country: String @@ -40,9 +41,10 @@ input ScrapedPerformerInput { name: String disambiguation: String gender: String - url: String - twitter: String - instagram: String + url: String @deprecated(reason: "use urls") + urls: [String!] 
+ twitter: String @deprecated(reason: "use urls") + instagram: String @deprecated(reason: "use urls") birthdate: String ethnicity: String country: String diff --git a/graphql/schema/types/scraper.graphql b/graphql/schema/types/scraper.graphql index 958aff5d228..d49df1b2ba4 100644 --- a/graphql/schema/types/scraper.graphql +++ b/graphql/schema/types/scraper.graphql @@ -11,6 +11,7 @@ enum ScrapeType { enum ScrapeContentType { GALLERY MOVIE + GROUP PERFORMER SCENE } @@ -22,6 +23,7 @@ union ScrapedContent = | ScrapedScene | ScrapedGallery | ScrapedMovie + | ScrapedGroup | ScrapedPerformer type ScraperSpec { @@ -40,7 +42,9 @@ type Scraper { "Details for gallery scraper" gallery: ScraperSpec "Details for movie scraper" - movie: ScraperSpec + movie: ScraperSpec @deprecated(reason: "use group") + "Details for group scraper" + group: ScraperSpec } type ScrapedStudio { @@ -76,7 +80,8 @@ type ScrapedScene { studio: ScrapedStudio tags: [ScrapedTag!] performers: [ScrapedPerformer!] - movies: [ScrapedMovie!] + movies: [ScrapedMovie!] @deprecated(reason: "use groups") + groups: [ScrapedGroup!] remote_site_id: String duration: Int @@ -128,7 +133,7 @@ input ScraperSourceInput { stash_box_index: Int @deprecated(reason: "use stash_box_endpoint") "Stash-box endpoint" stash_box_endpoint: String - "Scraper ID to scrape with. Should be unset if stash_box_index is set" + "Scraper ID to scrape with. Should be unset if stash_box_endpoint/stash_box_index is set" scraper_id: ID } @@ -137,7 +142,7 @@ type ScraperSource { stash_box_index: Int @deprecated(reason: "use stash_box_endpoint") "Stash-box endpoint" stash_box_endpoint: String - "Scraper ID to scrape with. Should be unset if stash_box_index is set" + "Scraper ID to scrape with. 
Should be unset if stash_box_endpoint/stash_box_index is set" scraper_id: ID } @@ -190,13 +195,24 @@ input ScrapeSingleMovieInput { query: String "Instructs to query by movie id" movie_id: ID - "Instructs to query by gallery fragment" + "Instructs to query by movie fragment" movie_input: ScrapedMovieInput } +input ScrapeSingleGroupInput { + "Instructs to query by string" + query: String + "Instructs to query by group id" + group_id: ID + "Instructs to query by group fragment" + group_input: ScrapedGroupInput +} + input StashBoxSceneQueryInput { "Index of the configured stash-box instance to use" - stash_box_index: Int! + stash_box_index: Int @deprecated(reason: "use stash_box_endpoint") + "Endpoint of the stash-box instance to use" + stash_box_endpoint: String "Instructs query by scene fingerprints" scene_ids: [ID!] "Query by query string" @@ -205,7 +221,9 @@ input StashBoxSceneQueryInput { input StashBoxPerformerQueryInput { "Index of the configured stash-box instance to use" - stash_box_index: Int! + stash_box_index: Int @deprecated(reason: "use stash_box_endpoint") + "Endpoint of the stash-box instance to use" + stash_box_endpoint: String "Instructs query by scene fingerprints" performer_ids: [ID!] "Query by query string" @@ -226,7 +244,9 @@ type StashBoxFingerprint { "If neither ids nor names are set, tag all items" input StashBoxBatchTagInput { "Stash endpoint to use for the tagging" - endpoint: Int! + endpoint: Int @deprecated(reason: "use stash_box_endpoint") + "Endpoint of the stash-box instance to use" + stash_box_endpoint: String "Fields to exclude when executing the tagging" exclude_fields: [String!] "Refresh items already tagged by StashBox if true. 
Only tag items with no StashBox tagging if false" diff --git a/graphql/schema/types/stash-box.graphql b/graphql/schema/types/stash-box.graphql index 865311e4ae4..71ea757f443 100644 --- a/graphql/schema/types/stash-box.graphql +++ b/graphql/schema/types/stash-box.graphql @@ -22,10 +22,12 @@ input StashIDInput { input StashBoxFingerprintSubmissionInput { scene_ids: [String!]! - stash_box_index: Int! + stash_box_index: Int @deprecated(reason: "use stash_box_endpoint") + stash_box_endpoint: String } input StashBoxDraftSubmissionInput { id: String! - stash_box_index: Int! + stash_box_index: Int @deprecated(reason: "use stash_box_endpoint") + stash_box_endpoint: String } diff --git a/graphql/schema/types/stats.graphql b/graphql/schema/types/stats.graphql index 3675c2a6bb2..6d78c919bfe 100644 --- a/graphql/schema/types/stats.graphql +++ b/graphql/schema/types/stats.graphql @@ -7,7 +7,8 @@ type StatsResultType { gallery_count: Int! performer_count: Int! studio_count: Int! - movie_count: Int! + group_count: Int! + movie_count: Int! @deprecated(reason: "use group_count instead") tag_count: Int! total_o_count: Int! total_play_duration: Float! diff --git a/graphql/schema/types/studio.graphql b/graphql/schema/types/studio.graphql index ff4eb5011c6..7823bf0c4ae 100644 --- a/graphql/schema/types/studio.graphql +++ b/graphql/schema/types/studio.graphql @@ -5,6 +5,7 @@ type Studio { parent_studio: Studio child_studios: [Studio!]! aliases: [String!]! + tags: [Tag!]! ignore_auto_tag: Boolean! image_path: String # Resolver @@ -12,7 +13,8 @@ type Studio { image_count(depth: Int): Int! # Resolver gallery_count(depth: Int): Int! # Resolver performer_count(depth: Int): Int! # Resolver - movie_count(depth: Int): Int! # Resolver + group_count(depth: Int): Int! # Resolver + movie_count(depth: Int): Int! @deprecated(reason: "use group_count instead") # Resolver stash_ids: [StashID!]! # rating expressed as 1-100 rating100: Int @@ -20,7 +22,8 @@ type Studio { details: String created_at: Time! 
updated_at: Time! - movies: [Movie!]! + groups: [Group!]! + movies: [Movie!]! @deprecated(reason: "use groups instead") } input StudioCreateInput { @@ -35,6 +38,7 @@ input StudioCreateInput { favorite: Boolean details: String aliases: [String!] + tag_ids: [ID!] ignore_auto_tag: Boolean } @@ -51,6 +55,7 @@ input StudioUpdateInput { favorite: Boolean details: String aliases: [String!] + tag_ids: [ID!] ignore_auto_tag: Boolean } diff --git a/graphql/schema/types/tag.graphql b/graphql/schema/types/tag.graphql index 69b8221c5f1..3c62c899cc3 100644 --- a/graphql/schema/types/tag.graphql +++ b/graphql/schema/types/tag.graphql @@ -13,6 +13,9 @@ type Tag { image_count(depth: Int): Int! # Resolver gallery_count(depth: Int): Int! # Resolver performer_count(depth: Int): Int! # Resolver + studio_count(depth: Int): Int! # Resolver + group_count(depth: Int): Int! # Resolver + movie_count(depth: Int): Int! @deprecated(reason: "use group_count instead") # Resolver parents: [Tag!]! children: [Tag!]! @@ -60,3 +63,14 @@ input TagsMergeInput { source: [ID!]! destination: ID! } + +input BulkTagUpdateInput { + ids: [ID!] 
+ description: String + aliases: BulkUpdateStrings + ignore_auto_tag: Boolean + favorite: Boolean + + parent_ids: BulkUpdateIds + child_ids: BulkUpdateIds +} diff --git a/internal/api/changeset_translator.go b/internal/api/changeset_translator.go index d148d47dacc..1170088aac9 100644 --- a/internal/api/changeset_translator.go +++ b/internal/api/changeset_translator.go @@ -346,32 +346,75 @@ func (t changesetTranslator) updateStashIDs(value []models.StashID, field string } } -func (t changesetTranslator) relatedMovies(value []models.SceneMovieInput) (models.RelatedMovies, error) { - moviesScenes, err := models.MoviesScenesFromInput(value) +func (t changesetTranslator) relatedGroupsFromMovies(value []models.SceneMovieInput) (models.RelatedGroups, error) { + groupsScenes, err := models.GroupsScenesFromInput(value) if err != nil { - return models.RelatedMovies{}, err + return models.RelatedGroups{}, err } - return models.NewRelatedMovies(moviesScenes), nil + return models.NewRelatedGroups(groupsScenes), nil } -func (t changesetTranslator) updateMovieIDs(value []models.SceneMovieInput, field string) (*models.UpdateMovieIDs, error) { +func groupsScenesFromGroupInput(input []models.SceneGroupInput) ([]models.GroupsScenes, error) { + ret := make([]models.GroupsScenes, len(input)) + + for i, v := range input { + mID, err := strconv.Atoi(v.GroupID) + if err != nil { + return nil, fmt.Errorf("invalid group ID: %s", v.GroupID) + } + + ret[i] = models.GroupsScenes{ + GroupID: mID, + SceneIndex: v.SceneIndex, + } + } + + return ret, nil +} + +func (t changesetTranslator) relatedGroups(value []models.SceneGroupInput) (models.RelatedGroups, error) { + groupsScenes, err := groupsScenesFromGroupInput(value) + if err != nil { + return models.RelatedGroups{}, err + } + + return models.NewRelatedGroups(groupsScenes), nil +} + +func (t changesetTranslator) updateGroupIDsFromMovies(value []models.SceneMovieInput, field string) (*models.UpdateGroupIDs, error) { + if !t.hasField(field) { + 
return nil, nil + } + + groupsScenes, err := models.GroupsScenesFromInput(value) + if err != nil { + return nil, err + } + + return &models.UpdateGroupIDs{ + Groups: groupsScenes, + Mode: models.RelationshipUpdateModeSet, + }, nil +} + +func (t changesetTranslator) updateGroupIDs(value []models.SceneGroupInput, field string) (*models.UpdateGroupIDs, error) { if !t.hasField(field) { return nil, nil } - moviesScenes, err := models.MoviesScenesFromInput(value) + groupsScenes, err := groupsScenesFromGroupInput(value) if err != nil { return nil, err } - return &models.UpdateMovieIDs{ - Movies: moviesScenes, + return &models.UpdateGroupIDs{ + Groups: groupsScenes, Mode: models.RelationshipUpdateModeSet, }, nil } -func (t changesetTranslator) updateMovieIDsBulk(value *BulkUpdateIds, field string) (*models.UpdateMovieIDs, error) { +func (t changesetTranslator) updateGroupIDsBulk(value *BulkUpdateIds, field string) (*models.UpdateGroupIDs, error) { if !t.hasField(field) || value == nil { return nil, nil } @@ -381,13 +424,74 @@ func (t changesetTranslator) updateMovieIDsBulk(value *BulkUpdateIds, field stri return nil, fmt.Errorf("converting ids [%v]: %w", value.Ids, err) } - movies := make([]models.MoviesScenes, len(ids)) + groups := make([]models.GroupsScenes, len(ids)) for i, id := range ids { - movies[i] = models.MoviesScenes{MovieID: id} + groups[i] = models.GroupsScenes{GroupID: id} + } + + return &models.UpdateGroupIDs{ + Groups: groups, + Mode: value.Mode, + }, nil +} + +func groupsDescriptionsFromGroupInput(input []*GroupDescriptionInput) ([]models.GroupIDDescription, error) { + ret := make([]models.GroupIDDescription, len(input)) + + for i, v := range input { + gID, err := strconv.Atoi(v.GroupID) + if err != nil { + return nil, fmt.Errorf("invalid group ID: %s", v.GroupID) + } + + ret[i] = models.GroupIDDescription{ + GroupID: gID, + } + if v.Description != nil { + ret[i].Description = *v.Description + } + } + + return ret, nil +} + +func (t changesetTranslator) 
groupIDDescriptions(value []*GroupDescriptionInput) (models.RelatedGroupDescriptions, error) { + groupsScenes, err := groupsDescriptionsFromGroupInput(value) + if err != nil { + return models.RelatedGroupDescriptions{}, err + } + + return models.NewRelatedGroupDescriptions(groupsScenes), nil +} + +func (t changesetTranslator) updateGroupIDDescriptions(value []*GroupDescriptionInput, field string) (*models.UpdateGroupDescriptions, error) { + if !t.hasField(field) { + return nil, nil + } + + groupsScenes, err := groupsDescriptionsFromGroupInput(value) + if err != nil { + return nil, err + } + + return &models.UpdateGroupDescriptions{ + Groups: groupsScenes, + Mode: models.RelationshipUpdateModeSet, + }, nil +} + +func (t changesetTranslator) updateGroupIDDescriptionsBulk(value *BulkUpdateGroupDescriptionsInput, field string) (*models.UpdateGroupDescriptions, error) { + if !t.hasField(field) || value == nil { + return nil, nil + } + + groups, err := groupsDescriptionsFromGroupInput(value.Groups) + if err != nil { + return nil, err } - return &models.UpdateMovieIDs{ - Movies: movies, + return &models.UpdateGroupDescriptions{ + Groups: groups, Mode: value.Mode, }, nil } diff --git a/internal/api/context_keys.go b/internal/api/context_keys.go index a8ab0afb50e..b3a7d135bbf 100644 --- a/internal/api/context_keys.go +++ b/internal/api/context_keys.go @@ -5,11 +5,11 @@ package api type key int const ( - // galleryKey key = 0 - performerKey key = iota + 1 + galleryKey key = 0 + performerKey sceneKey studioKey - movieKey + groupKey tagKey downloadKey imageKey diff --git a/internal/api/doc.go b/internal/api/doc.go new file mode 100644 index 00000000000..a0498359ad7 --- /dev/null +++ b/internal/api/doc.go @@ -0,0 +1,2 @@ +// Package api provides the HTTP and Graphql API for the application. 
+package api diff --git a/internal/api/loaders/dataloaders.go b/internal/api/loaders/dataloaders.go index 397b57d04f6..fca3e6c1842 100644 --- a/internal/api/loaders/dataloaders.go +++ b/internal/api/loaders/dataloaders.go @@ -1,10 +1,14 @@ +// Package loaders contains the dataloaders used by the resolver in [api]. +// They are generated with `make generate-dataloaders`. +// The dataloaders are used to batch requests to the database. + //go:generate go run github.com/vektah/dataloaden SceneLoader int *github.com/stashapp/stash/pkg/models.Scene //go:generate go run github.com/vektah/dataloaden GalleryLoader int *github.com/stashapp/stash/pkg/models.Gallery //go:generate go run github.com/vektah/dataloaden ImageLoader int *github.com/stashapp/stash/pkg/models.Image //go:generate go run github.com/vektah/dataloaden PerformerLoader int *github.com/stashapp/stash/pkg/models.Performer //go:generate go run github.com/vektah/dataloaden StudioLoader int *github.com/stashapp/stash/pkg/models.Studio //go:generate go run github.com/vektah/dataloaden TagLoader int *github.com/stashapp/stash/pkg/models.Tag -//go:generate go run github.com/vektah/dataloaden MovieLoader int *github.com/stashapp/stash/pkg/models.Movie +//go:generate go run github.com/vektah/dataloaden GroupLoader int *github.com/stashapp/stash/pkg/models.Group //go:generate go run github.com/vektah/dataloaden FileLoader github.com/stashapp/stash/pkg/models.FileID github.com/stashapp/stash/pkg/models.File //go:generate go run github.com/vektah/dataloaden SceneFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID //go:generate go run github.com/vektah/dataloaden ImageFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID @@ -52,7 +56,7 @@ type Loaders struct { PerformerByID *PerformerLoader StudioByID *StudioLoader TagByID *TagLoader - MovieByID *MovieLoader + GroupByID *GroupLoader FileByID *FileLoader } @@ -94,10 +98,10 @@ func (m Middleware) Middleware(next http.Handler) http.Handler { maxBatch: 
maxBatch, fetch: m.fetchTags(ctx), }, - MovieByID: &MovieLoader{ + GroupByID: &GroupLoader{ wait: wait, maxBatch: maxBatch, - fetch: m.fetchMovies(ctx), + fetch: m.fetchGroups(ctx), }, FileByID: &FileLoader{ wait: wait, @@ -232,11 +236,11 @@ func (m Middleware) fetchTags(ctx context.Context) func(keys []int) ([]*models.T } } -func (m Middleware) fetchMovies(ctx context.Context) func(keys []int) ([]*models.Movie, []error) { - return func(keys []int) (ret []*models.Movie, errs []error) { +func (m Middleware) fetchGroups(ctx context.Context) func(keys []int) ([]*models.Group, []error) { + return func(keys []int) (ret []*models.Group, errs []error) { err := m.Repository.WithDB(ctx, func(ctx context.Context) error { var err error - ret, err = m.Repository.Movie.FindMany(ctx, keys) + ret, err = m.Repository.Group.FindMany(ctx, keys) return err }) return ret, toErrorSlice(err) diff --git a/internal/api/loaders/movieloader_gen.go b/internal/api/loaders/grouploader_gen.go similarity index 68% rename from internal/api/loaders/movieloader_gen.go rename to internal/api/loaders/grouploader_gen.go index 3783d3a4125..e892f63c20e 100644 --- a/internal/api/loaders/movieloader_gen.go +++ b/internal/api/loaders/grouploader_gen.go @@ -9,10 +9,10 @@ import ( "github.com/stashapp/stash/pkg/models" ) -// MovieLoaderConfig captures the config to create a new MovieLoader -type MovieLoaderConfig struct { +// GroupLoaderConfig captures the config to create a new GroupLoader +type GroupLoaderConfig struct { // Fetch is a method that provides the data for the loader - Fetch func(keys []int) ([]*models.Movie, []error) + Fetch func(keys []int) ([]*models.Group, []error) // Wait is how long wait before sending a batch Wait time.Duration @@ -21,19 +21,19 @@ type MovieLoaderConfig struct { MaxBatch int } -// NewMovieLoader creates a new MovieLoader given a fetch, wait, and maxBatch -func NewMovieLoader(config MovieLoaderConfig) *MovieLoader { - return &MovieLoader{ +// NewGroupLoader creates a new 
GroupLoader given a fetch, wait, and maxBatch +func NewGroupLoader(config GroupLoaderConfig) *GroupLoader { + return &GroupLoader{ fetch: config.Fetch, wait: config.Wait, maxBatch: config.MaxBatch, } } -// MovieLoader batches and caches requests -type MovieLoader struct { +// GroupLoader batches and caches requests +type GroupLoader struct { // this method provides the data for the loader - fetch func(keys []int) ([]*models.Movie, []error) + fetch func(keys []int) ([]*models.Group, []error) // how long to done before sending a batch wait time.Duration @@ -44,51 +44,51 @@ type MovieLoader struct { // INTERNAL // lazily created cache - cache map[int]*models.Movie + cache map[int]*models.Group // the current batch. keys will continue to be collected until timeout is hit, // then everything will be sent to the fetch method and out to the listeners - batch *movieLoaderBatch + batch *groupLoaderBatch // mutex to prevent races mu sync.Mutex } -type movieLoaderBatch struct { +type groupLoaderBatch struct { keys []int - data []*models.Movie + data []*models.Group error []error closing bool done chan struct{} } -// Load a Movie by key, batching and caching will be applied automatically -func (l *MovieLoader) Load(key int) (*models.Movie, error) { +// Load a Group by key, batching and caching will be applied automatically +func (l *GroupLoader) Load(key int) (*models.Group, error) { return l.LoadThunk(key)() } -// LoadThunk returns a function that when called will block waiting for a Movie. +// LoadThunk returns a function that when called will block waiting for a Group. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. 
-func (l *MovieLoader) LoadThunk(key int) func() (*models.Movie, error) { +func (l *GroupLoader) LoadThunk(key int) func() (*models.Group, error) { l.mu.Lock() if it, ok := l.cache[key]; ok { l.mu.Unlock() - return func() (*models.Movie, error) { + return func() (*models.Group, error) { return it, nil } } if l.batch == nil { - l.batch = &movieLoaderBatch{done: make(chan struct{})} + l.batch = &groupLoaderBatch{done: make(chan struct{})} } batch := l.batch pos := batch.keyIndex(l, key) l.mu.Unlock() - return func() (*models.Movie, error) { + return func() (*models.Group, error) { <-batch.done - var data *models.Movie + var data *models.Group if pos < len(batch.data) { data = batch.data[pos] } @@ -113,43 +113,43 @@ func (l *MovieLoader) LoadThunk(key int) func() (*models.Movie, error) { // LoadAll fetches many keys at once. It will be broken into appropriate sized // sub batches depending on how the loader is configured -func (l *MovieLoader) LoadAll(keys []int) ([]*models.Movie, []error) { - results := make([]func() (*models.Movie, error), len(keys)) +func (l *GroupLoader) LoadAll(keys []int) ([]*models.Group, []error) { + results := make([]func() (*models.Group, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - movies := make([]*models.Movie, len(keys)) + groups := make([]*models.Group, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { - movies[i], errors[i] = thunk() + groups[i], errors[i] = thunk() } - return movies, errors + return groups, errors } -// LoadAllThunk returns a function that when called will block waiting for a Movies. +// LoadAllThunk returns a function that when called will block waiting for a Groups. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. 
-func (l *MovieLoader) LoadAllThunk(keys []int) func() ([]*models.Movie, []error) { - results := make([]func() (*models.Movie, error), len(keys)) +func (l *GroupLoader) LoadAllThunk(keys []int) func() ([]*models.Group, []error) { + results := make([]func() (*models.Group, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - return func() ([]*models.Movie, []error) { - movies := make([]*models.Movie, len(keys)) + return func() ([]*models.Group, []error) { + groups := make([]*models.Group, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { - movies[i], errors[i] = thunk() + groups[i], errors[i] = thunk() } - return movies, errors + return groups, errors } } // Prime the cache with the provided key and value. If the key already exists, no change is made // and false is returned. // (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) -func (l *MovieLoader) Prime(key int, value *models.Movie) bool { +func (l *GroupLoader) Prime(key int, value *models.Group) bool { l.mu.Lock() var found bool if _, found = l.cache[key]; !found { @@ -163,22 +163,22 @@ func (l *MovieLoader) Prime(key int, value *models.Movie) bool { } // Clear the value at key from the cache, if it exists -func (l *MovieLoader) Clear(key int) { +func (l *GroupLoader) Clear(key int) { l.mu.Lock() delete(l.cache, key) l.mu.Unlock() } -func (l *MovieLoader) unsafeSet(key int, value *models.Movie) { +func (l *GroupLoader) unsafeSet(key int, value *models.Group) { if l.cache == nil { - l.cache = map[int]*models.Movie{} + l.cache = map[int]*models.Group{} } l.cache[key] = value } // keyIndex will return the location of the key in the batch, if its not found // it will add the key to the batch -func (b *movieLoaderBatch) keyIndex(l *MovieLoader, key int) int { +func (b *groupLoaderBatch) keyIndex(l *GroupLoader, key int) int { for i, existingKey := range b.keys { if key == existingKey { return i @@ -202,7 +202,7 @@ 
func (b *movieLoaderBatch) keyIndex(l *MovieLoader, key int) int { return pos } -func (b *movieLoaderBatch) startTimer(l *MovieLoader) { +func (b *groupLoaderBatch) startTimer(l *GroupLoader) { time.Sleep(l.wait) l.mu.Lock() @@ -218,7 +218,7 @@ func (b *movieLoaderBatch) startTimer(l *MovieLoader) { b.end(l) } -func (b *movieLoaderBatch) end(l *MovieLoader) { +func (b *groupLoaderBatch) end(l *GroupLoader) { b.data, b.error = l.fetch(b.keys) close(b.done) } diff --git a/internal/api/resolver.go b/internal/api/resolver.go index 50adea9adc0..ab6eead7e5e 100644 --- a/internal/api/resolver.go +++ b/internal/api/resolver.go @@ -37,6 +37,7 @@ type Resolver struct { sceneService manager.SceneService imageService manager.ImageService galleryService manager.GalleryService + groupService manager.GroupService hookExecutor hookExecutor } @@ -72,9 +73,14 @@ func (r *Resolver) SceneMarker() SceneMarkerResolver { func (r *Resolver) Studio() StudioResolver { return &studioResolver{r} } + +func (r *Resolver) Group() GroupResolver { + return &groupResolver{r} +} func (r *Resolver) Movie() MovieResolver { - return &movieResolver{r} + return &movieResolver{&groupResolver{r}} } + func (r *Resolver) Subscription() SubscriptionResolver { return &subscriptionResolver{r} } @@ -111,7 +117,11 @@ type sceneResolver struct{ *Resolver } type sceneMarkerResolver struct{ *Resolver } type imageResolver struct{ *Resolver } type studioResolver struct{ *Resolver } -type movieResolver struct{ *Resolver } + +// movie is group under the hood +type groupResolver struct{ *Resolver } +type movieResolver struct{ *groupResolver } + type tagResolver struct{ *Resolver } type galleryFileResolver struct{ *Resolver } type videoFileResolver struct{ *Resolver } @@ -173,7 +183,7 @@ func (r *queryResolver) Stats(ctx context.Context) (*StatsResultType, error) { galleryQB := repo.Gallery studioQB := repo.Studio performerQB := repo.Performer - movieQB := repo.Movie + movieQB := repo.Group tagQB := repo.Tag // embrace 
the error @@ -218,7 +228,7 @@ func (r *queryResolver) Stats(ctx context.Context) (*StatsResultType, error) { return err } - moviesCount, err := movieQB.Count(ctx) + groupsCount, err := movieQB.Count(ctx) if err != nil { return err } @@ -262,7 +272,8 @@ func (r *queryResolver) Stats(ctx context.Context) (*StatsResultType, error) { GalleryCount: galleryCount, PerformerCount: performersCount, StudioCount: studiosCount, - MovieCount: moviesCount, + GroupCount: groupsCount, + MovieCount: groupsCount, TagCount: tagsCount, TotalOCount: totalOCount, TotalPlayDuration: totalPlayDuration, diff --git a/internal/api/resolver_model_gallery.go b/internal/api/resolver_model_gallery.go index 3057843e012..9dc68b4c451 100644 --- a/internal/api/resolver_model_gallery.go +++ b/internal/api/resolver_model_gallery.go @@ -2,8 +2,10 @@ package api import ( "context" + "fmt" "github.com/stashapp/stash/internal/api/loaders" + "github.com/stashapp/stash/internal/api/urlbuilders" "github.com/stashapp/stash/internal/manager/config" "github.com/stashapp/stash/pkg/image" @@ -189,3 +191,28 @@ func (r *galleryResolver) Urls(ctx context.Context, obj *models.Gallery) ([]stri return obj.URLs.List(), nil } + +func (r *galleryResolver) Paths(ctx context.Context, obj *models.Gallery) (*GalleryPathsType, error) { + baseURL, _ := ctx.Value(BaseURLCtxKey).(string) + builder := urlbuilders.NewGalleryURLBuilder(baseURL, obj) + + return &GalleryPathsType{ + Cover: builder.GetCoverURL(), + Preview: builder.GetPreviewURL(), + }, nil +} + +func (r *galleryResolver) Image(ctx context.Context, obj *models.Gallery, index int) (ret *models.Image, err error) { + if index < 0 { + return nil, fmt.Errorf("index must >= 0") + } + + if err := r.withReadTxn(ctx, func(ctx context.Context) error { + ret, err = r.repository.Image.FindByGalleryIDIndex(ctx, obj.ID, uint(index)) + return err + }); err != nil { + return nil, err + } + + return +} diff --git a/internal/api/resolver_model_movie.go 
b/internal/api/resolver_model_movie.go index e08d99471c3..04018d81fbb 100644 --- a/internal/api/resolver_model_movie.go +++ b/internal/api/resolver_model_movie.go @@ -5,10 +5,12 @@ import ( "github.com/stashapp/stash/internal/api/loaders" "github.com/stashapp/stash/internal/api/urlbuilders" + "github.com/stashapp/stash/pkg/group" "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/scene" ) -func (r *movieResolver) Date(ctx context.Context, obj *models.Movie) (*string, error) { +func (r *groupResolver) Date(ctx context.Context, obj *models.Group) (*string, error) { if obj.Date != nil { result := obj.Date.String() return &result, nil @@ -16,11 +18,40 @@ func (r *movieResolver) Date(ctx context.Context, obj *models.Movie) (*string, e return nil, nil } -func (r *movieResolver) Rating100(ctx context.Context, obj *models.Movie) (*int, error) { +func (r *groupResolver) Rating100(ctx context.Context, obj *models.Group) (*int, error) { return obj.Rating, nil } -func (r *movieResolver) Studio(ctx context.Context, obj *models.Movie) (ret *models.Studio, err error) { +func (r *groupResolver) URL(ctx context.Context, obj *models.Group) (*string, error) { + if !obj.URLs.Loaded() { + if err := r.withReadTxn(ctx, func(ctx context.Context) error { + return obj.LoadURLs(ctx, r.repository.Group) + }); err != nil { + return nil, err + } + } + + urls := obj.URLs.List() + if len(urls) == 0 { + return nil, nil + } + + return &urls[0], nil +} + +func (r *groupResolver) Urls(ctx context.Context, obj *models.Group) ([]string, error) { + if !obj.URLs.Loaded() { + if err := r.withReadTxn(ctx, func(ctx context.Context) error { + return obj.LoadURLs(ctx, r.repository.Group) + }); err != nil { + return nil, err + } + } + + return obj.URLs.List(), nil +} + +func (r *groupResolver) Studio(ctx context.Context, obj *models.Group) (ret *models.Studio, err error) { if obj.StudioID == nil { return nil, nil } @@ -28,26 +59,102 @@ func (r *movieResolver) Studio(ctx context.Context, 
obj *models.Movie) (ret *mod return loaders.From(ctx).StudioByID.Load(*obj.StudioID) } -func (r *movieResolver) FrontImagePath(ctx context.Context, obj *models.Movie) (*string, error) { +func (r groupResolver) Tags(ctx context.Context, obj *models.Group) (ret []*models.Tag, err error) { + if !obj.TagIDs.Loaded() { + if err := r.withReadTxn(ctx, func(ctx context.Context) error { + return obj.LoadTagIDs(ctx, r.repository.Group) + }); err != nil { + return nil, err + } + } + + var errs []error + ret, errs = loaders.From(ctx).TagByID.LoadAll(obj.TagIDs.List()) + return ret, firstError(errs) +} + +func (r groupResolver) relatedGroups(ctx context.Context, rgd models.RelatedGroupDescriptions) (ret []*GroupDescription, err error) { + // rgd must be loaded + gds := rgd.List() + ids := make([]int, len(gds)) + for i, gd := range gds { + ids[i] = gd.GroupID + } + + groups, errs := loaders.From(ctx).GroupByID.LoadAll(ids) + + err = firstError(errs) + if err != nil { + return + } + + ret = make([]*GroupDescription, len(groups)) + for i, group := range groups { + ret[i] = &GroupDescription{Group: group} + d := gds[i].Description + if d != "" { + ret[i].Description = &d + } + } + + return ret, firstError(errs) +} + +func (r groupResolver) ContainingGroups(ctx context.Context, obj *models.Group) (ret []*GroupDescription, err error) { + if !obj.ContainingGroups.Loaded() { + if err := r.withReadTxn(ctx, func(ctx context.Context) error { + return obj.LoadContainingGroupIDs(ctx, r.repository.Group) + }); err != nil { + return nil, err + } + } + + return r.relatedGroups(ctx, obj.ContainingGroups) +} + +func (r groupResolver) SubGroups(ctx context.Context, obj *models.Group) (ret []*GroupDescription, err error) { + if !obj.SubGroups.Loaded() { + if err := r.withReadTxn(ctx, func(ctx context.Context) error { + return obj.LoadSubGroupIDs(ctx, r.repository.Group) + }); err != nil { + return nil, err + } + } + + return r.relatedGroups(ctx, obj.SubGroups) +} + +func (r *groupResolver) 
SubGroupCount(ctx context.Context, obj *models.Group, depth *int) (ret int, err error) { + if err := r.withReadTxn(ctx, func(ctx context.Context) error { + ret, err = group.CountByContainingGroupID(ctx, r.repository.Group, obj.ID, depth) + return err + }); err != nil { + return 0, err + } + + return ret, nil +} + +func (r *groupResolver) FrontImagePath(ctx context.Context, obj *models.Group) (*string, error) { var hasImage bool if err := r.withReadTxn(ctx, func(ctx context.Context) error { var err error - hasImage, err = r.repository.Movie.HasFrontImage(ctx, obj.ID) + hasImage, err = r.repository.Group.HasFrontImage(ctx, obj.ID) return err }); err != nil { return nil, err } baseURL, _ := ctx.Value(BaseURLCtxKey).(string) - imagePath := urlbuilders.NewMovieURLBuilder(baseURL, obj).GetMovieFrontImageURL(hasImage) + imagePath := urlbuilders.NewGroupURLBuilder(baseURL, obj).GetGroupFrontImageURL(hasImage) return &imagePath, nil } -func (r *movieResolver) BackImagePath(ctx context.Context, obj *models.Movie) (*string, error) { +func (r *groupResolver) BackImagePath(ctx context.Context, obj *models.Group) (*string, error) { var hasImage bool if err := r.withReadTxn(ctx, func(ctx context.Context) error { var err error - hasImage, err = r.repository.Movie.HasBackImage(ctx, obj.ID) + hasImage, err = r.repository.Group.HasBackImage(ctx, obj.ID) return err }); err != nil { return nil, err @@ -59,13 +166,13 @@ func (r *movieResolver) BackImagePath(ctx context.Context, obj *models.Movie) (* } baseURL, _ := ctx.Value(BaseURLCtxKey).(string) - imagePath := urlbuilders.NewMovieURLBuilder(baseURL, obj).GetMovieBackImageURL() + imagePath := urlbuilders.NewGroupURLBuilder(baseURL, obj).GetGroupBackImageURL() return &imagePath, nil } -func (r *movieResolver) SceneCount(ctx context.Context, obj *models.Movie) (ret int, err error) { +func (r *groupResolver) SceneCount(ctx context.Context, obj *models.Group, depth *int) (ret int, err error) { if err := r.withReadTxn(ctx, func(ctx 
context.Context) error { - ret, err = r.repository.Scene.CountByMovieID(ctx, obj.ID) + ret, err = scene.CountByGroupID(ctx, r.repository.Scene, obj.ID, depth) return err }); err != nil { return 0, err @@ -74,10 +181,10 @@ func (r *movieResolver) SceneCount(ctx context.Context, obj *models.Movie) (ret return ret, nil } -func (r *movieResolver) Scenes(ctx context.Context, obj *models.Movie) (ret []*models.Scene, err error) { +func (r *groupResolver) Scenes(ctx context.Context, obj *models.Group) (ret []*models.Scene, err error) { if err := r.withReadTxn(ctx, func(ctx context.Context) error { var err error - ret, err = r.repository.Scene.FindByMovieID(ctx, obj.ID) + ret, err = r.repository.Scene.FindByGroupID(ctx, obj.ID) return err }); err != nil { return nil, err diff --git a/internal/api/resolver_model_performer.go b/internal/api/resolver_model_performer.go index 6164ff29728..b6f6af369ad 100644 --- a/internal/api/resolver_model_performer.go +++ b/internal/api/resolver_model_performer.go @@ -24,6 +24,79 @@ func (r *performerResolver) AliasList(ctx context.Context, obj *models.Performer return obj.Aliases.List(), nil } +func (r *performerResolver) URL(ctx context.Context, obj *models.Performer) (*string, error) { + if !obj.URLs.Loaded() { + if err := r.withReadTxn(ctx, func(ctx context.Context) error { + return obj.LoadURLs(ctx, r.repository.Performer) + }); err != nil { + return nil, err + } + } + + urls := obj.URLs.List() + if len(urls) == 0 { + return nil, nil + } + + return &urls[0], nil +} + +func (r *performerResolver) Twitter(ctx context.Context, obj *models.Performer) (*string, error) { + if !obj.URLs.Loaded() { + if err := r.withReadTxn(ctx, func(ctx context.Context) error { + return obj.LoadURLs(ctx, r.repository.Performer) + }); err != nil { + return nil, err + } + } + + urls := obj.URLs.List() + + // find the first twitter url + for _, url := range urls { + if performer.IsTwitterURL(url) { + u := url + return &u, nil + } + } + + return nil, nil +} + +func 
(r *performerResolver) Instagram(ctx context.Context, obj *models.Performer) (*string, error) { + if !obj.URLs.Loaded() { + if err := r.withReadTxn(ctx, func(ctx context.Context) error { + return obj.LoadURLs(ctx, r.repository.Performer) + }); err != nil { + return nil, err + } + } + + urls := obj.URLs.List() + + // find the first instagram url + for _, url := range urls { + if performer.IsInstagramURL(url) { + u := url + return &u, nil + } + } + + return nil, nil +} + +func (r *performerResolver) Urls(ctx context.Context, obj *models.Performer) ([]string, error) { + if !obj.URLs.Loaded() { + if err := r.withReadTxn(ctx, func(ctx context.Context) error { + return obj.LoadURLs(ctx, r.repository.Performer) + }); err != nil { + return nil, err + } + } + + return obj.URLs.List(), nil +} + func (r *performerResolver) Height(ctx context.Context, obj *models.Performer) (*string, error) { if obj.Height != nil { ret := strconv.Itoa(*obj.Height) @@ -106,9 +179,9 @@ func (r *performerResolver) GalleryCount(ctx context.Context, obj *models.Perfor return ret, nil } -func (r *performerResolver) MovieCount(ctx context.Context, obj *models.Performer) (ret int, err error) { +func (r *performerResolver) GroupCount(ctx context.Context, obj *models.Performer) (ret int, err error) { if err := r.withReadTxn(ctx, func(ctx context.Context) error { - ret, err = r.repository.Movie.CountByPerformerID(ctx, obj.ID) + ret, err = r.repository.Group.CountByPerformerID(ctx, obj.ID) return err }); err != nil { return 0, err @@ -117,6 +190,11 @@ func (r *performerResolver) MovieCount(ctx context.Context, obj *models.Performe return ret, nil } +// deprecated +func (r *performerResolver) MovieCount(ctx context.Context, obj *models.Performer) (ret int, err error) { + return r.GroupCount(ctx, obj) +} + func (r *performerResolver) PerformerCount(ctx context.Context, obj *models.Performer) (ret int, err error) { if err := r.withReadTxn(ctx, func(ctx context.Context) error { ret, err = 
performer.CountByAppearsWith(ctx, r.repository.Performer, obj.ID) @@ -179,9 +257,9 @@ func (r *performerResolver) DeathDate(ctx context.Context, obj *models.Performer return nil, nil } -func (r *performerResolver) Movies(ctx context.Context, obj *models.Performer) (ret []*models.Movie, err error) { +func (r *performerResolver) Groups(ctx context.Context, obj *models.Performer) (ret []*models.Group, err error) { if err := r.withReadTxn(ctx, func(ctx context.Context) error { - ret, err = r.repository.Movie.FindByPerformerID(ctx, obj.ID) + ret, err = r.repository.Group.FindByPerformerID(ctx, obj.ID) return err }); err != nil { return nil, err @@ -189,3 +267,8 @@ func (r *performerResolver) Movies(ctx context.Context, obj *models.Performer) ( return ret, nil } + +// deprecated +func (r *performerResolver) Movies(ctx context.Context, obj *models.Performer) (ret []*models.Group, err error) { + return r.Groups(ctx, obj) +} diff --git a/internal/api/resolver_model_scene.go b/internal/api/resolver_model_scene.go index 2376ca22727..2600c9538aa 100644 --- a/internal/api/resolver_model_scene.go +++ b/internal/api/resolver_model_scene.go @@ -184,20 +184,20 @@ func (r *sceneResolver) Studio(ctx context.Context, obj *models.Scene) (ret *mod } func (r *sceneResolver) Movies(ctx context.Context, obj *models.Scene) (ret []*SceneMovie, err error) { - if !obj.Movies.Loaded() { + if !obj.Groups.Loaded() { if err := r.withReadTxn(ctx, func(ctx context.Context) error { qb := r.repository.Scene - return obj.LoadMovies(ctx, qb) + return obj.LoadGroups(ctx, qb) }); err != nil { return nil, err } } - loader := loaders.From(ctx).MovieByID + loader := loaders.From(ctx).GroupByID - for _, sm := range obj.Movies.List() { - movie, err := loader.Load(sm.MovieID) + for _, sm := range obj.Groups.List() { + movie, err := loader.Load(sm.GroupID) if err != nil { return nil, err } @@ -214,6 +214,37 @@ func (r *sceneResolver) Movies(ctx context.Context, obj *models.Scene) (ret []*S return ret, nil } 
+func (r *sceneResolver) Groups(ctx context.Context, obj *models.Scene) (ret []*SceneGroup, err error) { + if !obj.Groups.Loaded() { + if err := r.withReadTxn(ctx, func(ctx context.Context) error { + qb := r.repository.Scene + + return obj.LoadGroups(ctx, qb) + }); err != nil { + return nil, err + } + } + + loader := loaders.From(ctx).GroupByID + + for _, sm := range obj.Groups.List() { + group, err := loader.Load(sm.GroupID) + if err != nil { + return nil, err + } + + sceneIdx := sm.SceneIndex + sceneGroup := &SceneGroup{ + Group: group, + SceneIndex: sceneIdx, + } + + ret = append(ret, sceneGroup) + } + + return ret, nil +} + func (r *sceneResolver) Tags(ctx context.Context, obj *models.Scene) (ret []*models.Tag, err error) { if !obj.TagIDs.Loaded() { if err := r.withReadTxn(ctx, func(ctx context.Context) error { diff --git a/internal/api/resolver_model_studio.go b/internal/api/resolver_model_studio.go index f7bc3a00df2..2111039c86b 100644 --- a/internal/api/resolver_model_studio.go +++ b/internal/api/resolver_model_studio.go @@ -6,9 +6,9 @@ import ( "github.com/stashapp/stash/internal/api/loaders" "github.com/stashapp/stash/internal/api/urlbuilders" "github.com/stashapp/stash/pkg/gallery" + "github.com/stashapp/stash/pkg/group" "github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/movie" "github.com/stashapp/stash/pkg/performer" "github.com/stashapp/stash/pkg/scene" ) @@ -40,6 +40,20 @@ func (r *studioResolver) Aliases(ctx context.Context, obj *models.Studio) ([]str return obj.Aliases.List(), nil } +func (r *studioResolver) Tags(ctx context.Context, obj *models.Studio) (ret []*models.Tag, err error) { + if !obj.TagIDs.Loaded() { + if err := r.withReadTxn(ctx, func(ctx context.Context) error { + return obj.LoadTagIDs(ctx, r.repository.Studio) + }); err != nil { + return nil, err + } + } + + var errs []error + ret, errs = loaders.From(ctx).TagByID.LoadAll(obj.TagIDs.List()) + return ret, firstError(errs) +} + 
func (r *studioResolver) SceneCount(ctx context.Context, obj *models.Studio, depth *int) (ret int, err error) { if err := r.withReadTxn(ctx, func(ctx context.Context) error { ret, err = scene.CountByStudioID(ctx, r.repository.Scene, obj.ID, depth) @@ -84,9 +98,9 @@ func (r *studioResolver) PerformerCount(ctx context.Context, obj *models.Studio, return ret, nil } -func (r *studioResolver) MovieCount(ctx context.Context, obj *models.Studio, depth *int) (ret int, err error) { +func (r *studioResolver) GroupCount(ctx context.Context, obj *models.Studio, depth *int) (ret int, err error) { if err := r.withReadTxn(ctx, func(ctx context.Context) error { - ret, err = movie.CountByStudioID(ctx, r.repository.Movie, obj.ID, depth) + ret, err = group.CountByStudioID(ctx, r.repository.Group, obj.ID, depth) return err }); err != nil { return 0, err @@ -95,6 +109,11 @@ func (r *studioResolver) MovieCount(ctx context.Context, obj *models.Studio, dep return ret, nil } +// deprecated +func (r *studioResolver) MovieCount(ctx context.Context, obj *models.Studio, depth *int) (ret int, err error) { + return r.GroupCount(ctx, obj, depth) +} + func (r *studioResolver) ParentStudio(ctx context.Context, obj *models.Studio) (ret *models.Studio, err error) { if obj.ParentID == nil { return nil, nil @@ -130,9 +149,9 @@ func (r *studioResolver) Rating100(ctx context.Context, obj *models.Studio) (*in return obj.Rating, nil } -func (r *studioResolver) Movies(ctx context.Context, obj *models.Studio) (ret []*models.Movie, err error) { +func (r *studioResolver) Groups(ctx context.Context, obj *models.Studio) (ret []*models.Group, err error) { if err := r.withReadTxn(ctx, func(ctx context.Context) error { - ret, err = r.repository.Movie.FindByStudioID(ctx, obj.ID) + ret, err = r.repository.Group.FindByStudioID(ctx, obj.ID) return err }); err != nil { return nil, err @@ -140,3 +159,8 @@ func (r *studioResolver) Movies(ctx context.Context, obj *models.Studio) (ret [] return ret, nil } + +// deprecated 
+func (r *studioResolver) Movies(ctx context.Context, obj *models.Studio) (ret []*models.Group, err error) { + return r.Groups(ctx, obj) +} diff --git a/internal/api/resolver_model_tag.go b/internal/api/resolver_model_tag.go index 9124b18f483..14237d2fe41 100644 --- a/internal/api/resolver_model_tag.go +++ b/internal/api/resolver_model_tag.go @@ -3,45 +3,55 @@ package api import ( "context" + "github.com/stashapp/stash/internal/api/loaders" "github.com/stashapp/stash/internal/api/urlbuilders" "github.com/stashapp/stash/pkg/gallery" + "github.com/stashapp/stash/pkg/group" "github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/performer" "github.com/stashapp/stash/pkg/scene" + "github.com/stashapp/stash/pkg/studio" ) func (r *tagResolver) Parents(ctx context.Context, obj *models.Tag) (ret []*models.Tag, err error) { - if err := r.withReadTxn(ctx, func(ctx context.Context) error { - ret, err = r.repository.Tag.FindByChildTagID(ctx, obj.ID) - return err - }); err != nil { - return nil, err + if !obj.ParentIDs.Loaded() { + if err := r.withReadTxn(ctx, func(ctx context.Context) error { + return obj.LoadParentIDs(ctx, r.repository.Tag) + }); err != nil { + return nil, err + } } - return ret, nil + var errs []error + ret, errs = loaders.From(ctx).TagByID.LoadAll(obj.ParentIDs.List()) + return ret, firstError(errs) } func (r *tagResolver) Children(ctx context.Context, obj *models.Tag) (ret []*models.Tag, err error) { - if err := r.withReadTxn(ctx, func(ctx context.Context) error { - ret, err = r.repository.Tag.FindByParentTagID(ctx, obj.ID) - return err - }); err != nil { - return nil, err + if !obj.ChildIDs.Loaded() { + if err := r.withReadTxn(ctx, func(ctx context.Context) error { + return obj.LoadChildIDs(ctx, r.repository.Tag) + }); err != nil { + return nil, err + } } - return ret, nil + var errs []error + ret, errs = loaders.From(ctx).TagByID.LoadAll(obj.ChildIDs.List()) + return ret, firstError(errs) } func (r 
*tagResolver) Aliases(ctx context.Context, obj *models.Tag) (ret []string, err error) { - if err := r.withReadTxn(ctx, func(ctx context.Context) error { - ret, err = r.repository.Tag.GetAliases(ctx, obj.ID) - return err - }); err != nil { - return nil, err + if !obj.Aliases.Loaded() { + if err := r.withReadTxn(ctx, func(ctx context.Context) error { + return obj.LoadAliases(ctx, r.repository.Tag) + }); err != nil { + return nil, err + } } - return ret, err + return obj.Aliases.List(), nil } func (r *tagResolver) SceneCount(ctx context.Context, obj *models.Tag, depth *int) (ret int, err error) { @@ -99,6 +109,32 @@ func (r *tagResolver) PerformerCount(ctx context.Context, obj *models.Tag, depth return ret, nil } +func (r *tagResolver) StudioCount(ctx context.Context, obj *models.Tag, depth *int) (ret int, err error) { + if err := r.withReadTxn(ctx, func(ctx context.Context) error { + ret, err = studio.CountByTagID(ctx, r.repository.Studio, obj.ID, depth) + return err + }); err != nil { + return 0, err + } + + return ret, nil +} + +func (r *tagResolver) GroupCount(ctx context.Context, obj *models.Tag, depth *int) (ret int, err error) { + if err := r.withReadTxn(ctx, func(ctx context.Context) error { + ret, err = group.CountByTagID(ctx, r.repository.Group, obj.ID, depth) + return err + }); err != nil { + return 0, err + } + + return ret, nil +} + +func (r *tagResolver) MovieCount(ctx context.Context, obj *models.Tag, depth *int) (ret int, err error) { + return r.GroupCount(ctx, obj, depth) +} + func (r *tagResolver) ImagePath(ctx context.Context, obj *models.Tag) (*string, error) { var hasImage bool if err := r.withReadTxn(ctx, func(ctx context.Context) error { diff --git a/internal/api/resolver_mutation_gallery.go b/internal/api/resolver_mutation_gallery.go index 2df6f1b7751..5d5cd4b3716 100644 --- a/internal/api/resolver_mutation_gallery.go +++ b/internal/api/resolver_mutation_gallery.go @@ -478,6 +478,61 @@ func (r *mutationResolver) RemoveGalleryImages(ctx 
context.Context, input Galler return true, nil } +func (r *mutationResolver) SetGalleryCover(ctx context.Context, input GallerySetCoverInput) (bool, error) { + galleryID, err := strconv.Atoi(input.GalleryID) + if err != nil { + return false, fmt.Errorf("converting gallery id: %w", err) + } + + coverImageID, err := strconv.Atoi(input.CoverImageID) + if err != nil { + return false, fmt.Errorf("converting cover image id: %w", err) + } + + if err := r.withTxn(ctx, func(ctx context.Context) error { + qb := r.repository.Gallery + gallery, err := qb.Find(ctx, galleryID) + if err != nil { + return err + } + + if gallery == nil { + return fmt.Errorf("gallery with id %d not found", galleryID) + } + + return r.galleryService.SetCover(ctx, gallery, coverImageID) + }); err != nil { + return false, err + } + + return true, nil +} + +func (r *mutationResolver) ResetGalleryCover(ctx context.Context, input GalleryResetCoverInput) (bool, error) { + galleryID, err := strconv.Atoi(input.GalleryID) + if err != nil { + return false, fmt.Errorf("converting gallery id: %w", err) + } + + if err := r.withTxn(ctx, func(ctx context.Context) error { + qb := r.repository.Gallery + gallery, err := qb.Find(ctx, galleryID) + if err != nil { + return err + } + + if gallery == nil { + return fmt.Errorf("gallery with id %d not found", galleryID) + } + + return r.galleryService.ResetCover(ctx, gallery) + }); err != nil { + return false, err + } + + return true, nil +} + func (r *mutationResolver) getGalleryChapter(ctx context.Context, id int) (ret *models.GalleryChapter, err error) { if err := r.withTxn(ctx, func(ctx context.Context) error { ret, err = r.repository.GalleryChapter.Find(ctx, id) diff --git a/internal/api/resolver_mutation_group.go b/internal/api/resolver_mutation_group.go new file mode 100644 index 00000000000..d75994d1497 --- /dev/null +++ b/internal/api/resolver_mutation_group.go @@ -0,0 +1,413 @@ +package api + +import ( + "context" + "fmt" + "strconv" + + 
"github.com/stashapp/stash/internal/static" + "github.com/stashapp/stash/pkg/group" + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/plugin/hook" + "github.com/stashapp/stash/pkg/sliceutil/stringslice" + "github.com/stashapp/stash/pkg/utils" +) + +func groupFromGroupCreateInput(ctx context.Context, input GroupCreateInput) (*models.Group, error) { + translator := changesetTranslator{ + inputMap: getUpdateInputMap(ctx), + } + + // Populate a new group from the input + newGroup := models.NewGroup() + + newGroup.Name = input.Name + newGroup.Aliases = translator.string(input.Aliases) + newGroup.Duration = input.Duration + newGroup.Rating = input.Rating100 + newGroup.Director = translator.string(input.Director) + newGroup.Synopsis = translator.string(input.Synopsis) + + var err error + + newGroup.Date, err = translator.datePtr(input.Date) + if err != nil { + return nil, fmt.Errorf("converting date: %w", err) + } + newGroup.StudioID, err = translator.intPtrFromString(input.StudioID) + if err != nil { + return nil, fmt.Errorf("converting studio id: %w", err) + } + + newGroup.TagIDs, err = translator.relatedIds(input.TagIds) + if err != nil { + return nil, fmt.Errorf("converting tag ids: %w", err) + } + + newGroup.ContainingGroups, err = translator.groupIDDescriptions(input.ContainingGroups) + if err != nil { + return nil, fmt.Errorf("converting containing group ids: %w", err) + } + + newGroup.SubGroups, err = translator.groupIDDescriptions(input.SubGroups) + if err != nil { + return nil, fmt.Errorf("converting containing group ids: %w", err) + } + + if input.Urls != nil { + newGroup.URLs = models.NewRelatedStrings(input.Urls) + } + + return &newGroup, nil +} + +func (r *mutationResolver) GroupCreate(ctx context.Context, input GroupCreateInput) (*models.Group, error) { + newGroup, err := groupFromGroupCreateInput(ctx, input) + if err != nil { + return nil, err + } + + // Process the base 64 encoded image string + var frontimageData []byte + if 
input.FrontImage != nil { + frontimageData, err = utils.ProcessImageInput(ctx, *input.FrontImage) + if err != nil { + return nil, fmt.Errorf("processing front image: %w", err) + } + } + + // Process the base 64 encoded image string + var backimageData []byte + if input.BackImage != nil { + backimageData, err = utils.ProcessImageInput(ctx, *input.BackImage) + if err != nil { + return nil, fmt.Errorf("processing back image: %w", err) + } + } + + // HACK: if back image is being set, set the front image to the default. + // This is because we can't have a null front image with a non-null back image. + if len(frontimageData) == 0 && len(backimageData) != 0 { + frontimageData = static.ReadAll(static.DefaultGroupImage) + } + + // Start the transaction and save the group + if err := r.withTxn(ctx, func(ctx context.Context) error { + if err = r.groupService.Create(ctx, newGroup, frontimageData, backimageData); err != nil { + return err + } + + return nil + }); err != nil { + return nil, err + } + + // for backwards compatibility - run both movie and group hooks + r.hookExecutor.ExecutePostHooks(ctx, newGroup.ID, hook.GroupCreatePost, input, nil) + r.hookExecutor.ExecutePostHooks(ctx, newGroup.ID, hook.MovieCreatePost, input, nil) + return r.getGroup(ctx, newGroup.ID) +} + +func groupPartialFromGroupUpdateInput(translator changesetTranslator, input GroupUpdateInput) (ret models.GroupPartial, err error) { + // Populate group from the input + updatedGroup := models.NewGroupPartial() + + updatedGroup.Name = translator.optionalString(input.Name, "name") + updatedGroup.Aliases = translator.optionalString(input.Aliases, "aliases") + updatedGroup.Duration = translator.optionalInt(input.Duration, "duration") + updatedGroup.Rating = translator.optionalInt(input.Rating100, "rating100") + updatedGroup.Director = translator.optionalString(input.Director, "director") + updatedGroup.Synopsis = translator.optionalString(input.Synopsis, "synopsis") + + updatedGroup.Date, err = 
translator.optionalDate(input.Date, "date") + if err != nil { + err = fmt.Errorf("converting date: %w", err) + return + } + updatedGroup.StudioID, err = translator.optionalIntFromString(input.StudioID, "studio_id") + if err != nil { + err = fmt.Errorf("converting studio id: %w", err) + return + } + + updatedGroup.TagIDs, err = translator.updateIds(input.TagIds, "tag_ids") + if err != nil { + err = fmt.Errorf("converting tag ids: %w", err) + return + } + + updatedGroup.ContainingGroups, err = translator.updateGroupIDDescriptions(input.ContainingGroups, "containing_groups") + if err != nil { + err = fmt.Errorf("converting containing group ids: %w", err) + return + } + + updatedGroup.SubGroups, err = translator.updateGroupIDDescriptions(input.SubGroups, "sub_groups") + if err != nil { + err = fmt.Errorf("converting containing group ids: %w", err) + return + } + + updatedGroup.URLs = translator.updateStrings(input.Urls, "urls") + + return updatedGroup, nil +} + +func (r *mutationResolver) GroupUpdate(ctx context.Context, input GroupUpdateInput) (*models.Group, error) { + groupID, err := strconv.Atoi(input.ID) + if err != nil { + return nil, fmt.Errorf("converting id: %w", err) + } + + translator := changesetTranslator{ + inputMap: getUpdateInputMap(ctx), + } + + updatedGroup, err := groupPartialFromGroupUpdateInput(translator, input) + if err != nil { + return nil, err + } + + var frontimageData []byte + frontImageIncluded := translator.hasField("front_image") + if input.FrontImage != nil { + frontimageData, err = utils.ProcessImageInput(ctx, *input.FrontImage) + if err != nil { + return nil, fmt.Errorf("processing front image: %w", err) + } + } + + var backimageData []byte + backImageIncluded := translator.hasField("back_image") + if input.BackImage != nil { + backimageData, err = utils.ProcessImageInput(ctx, *input.BackImage) + if err != nil { + return nil, fmt.Errorf("processing back image: %w", err) + } + } + + if err := r.withTxn(ctx, func(ctx context.Context) 
error { + frontImage := group.ImageInput{ + Image: frontimageData, + Set: frontImageIncluded, + } + + backImage := group.ImageInput{ + Image: backimageData, + Set: backImageIncluded, + } + + _, err = r.groupService.UpdatePartial(ctx, groupID, updatedGroup, frontImage, backImage) + if err != nil { + return err + } + + return nil + }); err != nil { + return nil, err + } + + // for backwards compatibility - run both movie and group hooks + r.hookExecutor.ExecutePostHooks(ctx, groupID, hook.GroupUpdatePost, input, translator.getFields()) + r.hookExecutor.ExecutePostHooks(ctx, groupID, hook.MovieUpdatePost, input, translator.getFields()) + return r.getGroup(ctx, groupID) +} + +func groupPartialFromBulkGroupUpdateInput(translator changesetTranslator, input BulkGroupUpdateInput) (ret models.GroupPartial, err error) { + updatedGroup := models.NewGroupPartial() + + updatedGroup.Rating = translator.optionalInt(input.Rating100, "rating100") + updatedGroup.Director = translator.optionalString(input.Director, "director") + + updatedGroup.StudioID, err = translator.optionalIntFromString(input.StudioID, "studio_id") + if err != nil { + err = fmt.Errorf("converting studio id: %w", err) + return + } + + updatedGroup.TagIDs, err = translator.updateIdsBulk(input.TagIds, "tag_ids") + if err != nil { + err = fmt.Errorf("converting tag ids: %w", err) + return + } + + updatedGroup.ContainingGroups, err = translator.updateGroupIDDescriptionsBulk(input.ContainingGroups, "containing_groups") + if err != nil { + err = fmt.Errorf("converting containing group ids: %w", err) + return + } + + updatedGroup.SubGroups, err = translator.updateGroupIDDescriptionsBulk(input.SubGroups, "sub_groups") + if err != nil { + err = fmt.Errorf("converting containing group ids: %w", err) + return + } + + updatedGroup.URLs = translator.optionalURLsBulk(input.Urls, nil) + + return updatedGroup, nil +} + +func (r *mutationResolver) BulkGroupUpdate(ctx context.Context, input BulkGroupUpdateInput) ([]*models.Group, 
error) { + groupIDs, err := stringslice.StringSliceToIntSlice(input.Ids) + if err != nil { + return nil, fmt.Errorf("converting ids: %w", err) + } + + translator := changesetTranslator{ + inputMap: getUpdateInputMap(ctx), + } + + // Populate group from the input + updatedGroup, err := groupPartialFromBulkGroupUpdateInput(translator, input) + if err != nil { + return nil, err + } + + ret := []*models.Group{} + + if err := r.withTxn(ctx, func(ctx context.Context) error { + for _, groupID := range groupIDs { + group, err := r.groupService.UpdatePartial(ctx, groupID, updatedGroup, group.ImageInput{}, group.ImageInput{}) + if err != nil { + return err + } + + ret = append(ret, group) + } + + return nil + }); err != nil { + return nil, err + } + + var newRet []*models.Group + for _, group := range ret { + // for backwards compatibility - run both movie and group hooks + r.hookExecutor.ExecutePostHooks(ctx, group.ID, hook.GroupUpdatePost, input, translator.getFields()) + r.hookExecutor.ExecutePostHooks(ctx, group.ID, hook.MovieUpdatePost, input, translator.getFields()) + + group, err = r.getGroup(ctx, group.ID) + if err != nil { + return nil, err + } + + newRet = append(newRet, group) + } + + return newRet, nil +} + +func (r *mutationResolver) GroupDestroy(ctx context.Context, input GroupDestroyInput) (bool, error) { + id, err := strconv.Atoi(input.ID) + if err != nil { + return false, fmt.Errorf("converting id: %w", err) + } + + if err := r.withTxn(ctx, func(ctx context.Context) error { + return r.repository.Group.Destroy(ctx, id) + }); err != nil { + return false, err + } + + // for backwards compatibility - run both movie and group hooks + r.hookExecutor.ExecutePostHooks(ctx, id, hook.GroupDestroyPost, input, nil) + r.hookExecutor.ExecutePostHooks(ctx, id, hook.MovieDestroyPost, input, nil) + + return true, nil +} + +func (r *mutationResolver) GroupsDestroy(ctx context.Context, groupIDs []string) (bool, error) { + ids, err := stringslice.StringSliceToIntSlice(groupIDs) 
+ if err != nil { + return false, fmt.Errorf("converting ids: %w", err) + } + + if err := r.withTxn(ctx, func(ctx context.Context) error { + qb := r.repository.Group + for _, id := range ids { + if err := qb.Destroy(ctx, id); err != nil { + return err + } + } + + return nil + }); err != nil { + return false, err + } + + for _, id := range ids { + // for backwards compatibility - run both movie and group hooks + r.hookExecutor.ExecutePostHooks(ctx, id, hook.GroupDestroyPost, groupIDs, nil) + r.hookExecutor.ExecutePostHooks(ctx, id, hook.MovieDestroyPost, groupIDs, nil) + } + + return true, nil +} + +func (r *mutationResolver) AddGroupSubGroups(ctx context.Context, input GroupSubGroupAddInput) (bool, error) { + groupID, err := strconv.Atoi(input.ContainingGroupID) + if err != nil { + return false, fmt.Errorf("converting group id: %w", err) + } + + subGroups, err := groupsDescriptionsFromGroupInput(input.SubGroups) + if err != nil { + return false, fmt.Errorf("converting sub group ids: %w", err) + } + + if err := r.withTxn(ctx, func(ctx context.Context) error { + return r.groupService.AddSubGroups(ctx, groupID, subGroups, input.InsertIndex) + }); err != nil { + return false, err + } + + return true, nil +} + +func (r *mutationResolver) RemoveGroupSubGroups(ctx context.Context, input GroupSubGroupRemoveInput) (bool, error) { + groupID, err := strconv.Atoi(input.ContainingGroupID) + if err != nil { + return false, fmt.Errorf("converting group id: %w", err) + } + + subGroupIDs, err := stringslice.StringSliceToIntSlice(input.SubGroupIds) + if err != nil { + return false, fmt.Errorf("converting sub group ids: %w", err) + } + + if err := r.withTxn(ctx, func(ctx context.Context) error { + return r.groupService.RemoveSubGroups(ctx, groupID, subGroupIDs) + }); err != nil { + return false, err + } + + return true, nil +} + +func (r *mutationResolver) ReorderSubGroups(ctx context.Context, input ReorderSubGroupsInput) (bool, error) { + groupID, err := strconv.Atoi(input.GroupID) 
+ if err != nil { + return false, fmt.Errorf("converting group id: %w", err) + } + + subGroupIDs, err := stringslice.StringSliceToIntSlice(input.SubGroupIds) + if err != nil { + return false, fmt.Errorf("converting sub group ids: %w", err) + } + + insertPointID, err := strconv.Atoi(input.InsertAtID) + if err != nil { + return false, fmt.Errorf("converting insert at id: %w", err) + } + + insertAfter := utils.IsTrue(input.InsertAfter) + + if err := r.withTxn(ctx, func(ctx context.Context) error { + return r.groupService.ReorderSubGroups(ctx, groupID, subGroupIDs, insertPointID, insertAfter) + }); err != nil { + return false, err + } + + return true, nil +} diff --git a/internal/api/resolver_mutation_movie.go b/internal/api/resolver_mutation_movie.go index cb447465419..2e1011083c0 100644 --- a/internal/api/resolver_mutation_movie.go +++ b/internal/api/resolver_mutation_movie.go @@ -12,10 +12,10 @@ import ( "github.com/stashapp/stash/pkg/utils" ) -// used to refetch movie after hooks run -func (r *mutationResolver) getMovie(ctx context.Context, id int) (ret *models.Movie, err error) { +// used to refetch group after hooks run +func (r *mutationResolver) getGroup(ctx context.Context, id int) (ret *models.Group, err error) { if err := r.withTxn(ctx, func(ctx context.Context) error { - ret, err = r.repository.Movie.Find(ctx, id) + ret, err = r.repository.Group.Find(ctx, id) return err }); err != nil { return nil, err @@ -24,33 +24,43 @@ func (r *mutationResolver) getMovie(ctx context.Context, id int) (ret *models.Mo return ret, nil } -func (r *mutationResolver) MovieCreate(ctx context.Context, input MovieCreateInput) (*models.Movie, error) { +func (r *mutationResolver) MovieCreate(ctx context.Context, input MovieCreateInput) (*models.Group, error) { translator := changesetTranslator{ inputMap: getUpdateInputMap(ctx), } - // Populate a new movie from the input - newMovie := models.NewMovie() + // Populate a new group from the input + newGroup := models.NewGroup() - 
newMovie.Name = input.Name - newMovie.Aliases = translator.string(input.Aliases) - newMovie.Duration = input.Duration - newMovie.Rating = input.Rating100 - newMovie.Director = translator.string(input.Director) - newMovie.Synopsis = translator.string(input.Synopsis) - newMovie.URL = translator.string(input.URL) + newGroup.Name = input.Name + newGroup.Aliases = translator.string(input.Aliases) + newGroup.Duration = input.Duration + newGroup.Rating = input.Rating100 + newGroup.Director = translator.string(input.Director) + newGroup.Synopsis = translator.string(input.Synopsis) var err error - newMovie.Date, err = translator.datePtr(input.Date) + newGroup.Date, err = translator.datePtr(input.Date) if err != nil { return nil, fmt.Errorf("converting date: %w", err) } - newMovie.StudioID, err = translator.intPtrFromString(input.StudioID) + newGroup.StudioID, err = translator.intPtrFromString(input.StudioID) if err != nil { return nil, fmt.Errorf("converting studio id: %w", err) } + newGroup.TagIDs, err = translator.relatedIds(input.TagIds) + if err != nil { + return nil, fmt.Errorf("converting tag ids: %w", err) + } + + if input.Urls != nil { + newGroup.URLs = models.NewRelatedStrings(input.Urls) + } else if input.URL != nil { + newGroup.URLs = models.NewRelatedStrings([]string{*input.URL}) + } + // Process the base 64 encoded image string var frontimageData []byte if input.FrontImage != nil { @@ -72,27 +82,27 @@ func (r *mutationResolver) MovieCreate(ctx context.Context, input MovieCreateInp // HACK: if back image is being set, set the front image to the default. // This is because we can't have a null front image with a non-null back image. 
if len(frontimageData) == 0 && len(backimageData) != 0 { - frontimageData = static.ReadAll(static.DefaultMovieImage) + frontimageData = static.ReadAll(static.DefaultGroupImage) } - // Start the transaction and save the movie + // Start the transaction and save the group if err := r.withTxn(ctx, func(ctx context.Context) error { - qb := r.repository.Movie + qb := r.repository.Group - err = qb.Create(ctx, &newMovie) + err = qb.Create(ctx, &newGroup) if err != nil { return err } // update image table if len(frontimageData) > 0 { - if err := qb.UpdateFrontImage(ctx, newMovie.ID, frontimageData); err != nil { + if err := qb.UpdateFrontImage(ctx, newGroup.ID, frontimageData); err != nil { return err } } if len(backimageData) > 0 { - if err := qb.UpdateBackImage(ctx, newMovie.ID, backimageData); err != nil { + if err := qb.UpdateBackImage(ctx, newGroup.ID, backimageData); err != nil { return err } } @@ -102,12 +112,14 @@ func (r *mutationResolver) MovieCreate(ctx context.Context, input MovieCreateInp return nil, err } - r.hookExecutor.ExecutePostHooks(ctx, newMovie.ID, hook.MovieCreatePost, input, nil) - return r.getMovie(ctx, newMovie.ID) + // for backwards compatibility - run both movie and group hooks + r.hookExecutor.ExecutePostHooks(ctx, newGroup.ID, hook.GroupCreatePost, input, nil) + r.hookExecutor.ExecutePostHooks(ctx, newGroup.ID, hook.MovieCreatePost, input, nil) + return r.getGroup(ctx, newGroup.ID) } -func (r *mutationResolver) MovieUpdate(ctx context.Context, input MovieUpdateInput) (*models.Movie, error) { - movieID, err := strconv.Atoi(input.ID) +func (r *mutationResolver) MovieUpdate(ctx context.Context, input MovieUpdateInput) (*models.Group, error) { + groupID, err := strconv.Atoi(input.ID) if err != nil { return nil, fmt.Errorf("converting id: %w", err) } @@ -116,26 +128,32 @@ func (r *mutationResolver) MovieUpdate(ctx context.Context, input MovieUpdateInp inputMap: getUpdateInputMap(ctx), } - // Populate movie from the input - updatedMovie := 
models.NewMoviePartial() + // Populate group from the input + updatedGroup := models.NewGroupPartial() - updatedMovie.Name = translator.optionalString(input.Name, "name") - updatedMovie.Aliases = translator.optionalString(input.Aliases, "aliases") - updatedMovie.Duration = translator.optionalInt(input.Duration, "duration") - updatedMovie.Rating = translator.optionalInt(input.Rating100, "rating100") - updatedMovie.Director = translator.optionalString(input.Director, "director") - updatedMovie.Synopsis = translator.optionalString(input.Synopsis, "synopsis") - updatedMovie.URL = translator.optionalString(input.URL, "url") + updatedGroup.Name = translator.optionalString(input.Name, "name") + updatedGroup.Aliases = translator.optionalString(input.Aliases, "aliases") + updatedGroup.Duration = translator.optionalInt(input.Duration, "duration") + updatedGroup.Rating = translator.optionalInt(input.Rating100, "rating100") + updatedGroup.Director = translator.optionalString(input.Director, "director") + updatedGroup.Synopsis = translator.optionalString(input.Synopsis, "synopsis") - updatedMovie.Date, err = translator.optionalDate(input.Date, "date") + updatedGroup.Date, err = translator.optionalDate(input.Date, "date") if err != nil { return nil, fmt.Errorf("converting date: %w", err) } - updatedMovie.StudioID, err = translator.optionalIntFromString(input.StudioID, "studio_id") + updatedGroup.StudioID, err = translator.optionalIntFromString(input.StudioID, "studio_id") if err != nil { return nil, fmt.Errorf("converting studio id: %w", err) } + updatedGroup.TagIDs, err = translator.updateIds(input.TagIds, "tag_ids") + if err != nil { + return nil, fmt.Errorf("converting tag ids: %w", err) + } + + updatedGroup.URLs = translator.optionalURLs(input.Urls, input.URL) + var frontimageData []byte frontImageIncluded := translator.hasField("front_image") if input.FrontImage != nil { @@ -154,24 +172,24 @@ func (r *mutationResolver) MovieUpdate(ctx context.Context, input MovieUpdateInp } 
} - // Start the transaction and save the movie - var movie *models.Movie + // Start the transaction and save the group + var group *models.Group if err := r.withTxn(ctx, func(ctx context.Context) error { - qb := r.repository.Movie - movie, err = qb.UpdatePartial(ctx, movieID, updatedMovie) + qb := r.repository.Group + group, err = qb.UpdatePartial(ctx, groupID, updatedGroup) if err != nil { return err } // update image table if frontImageIncluded { - if err := qb.UpdateFrontImage(ctx, movie.ID, frontimageData); err != nil { + if err := qb.UpdateFrontImage(ctx, group.ID, frontimageData); err != nil { return err } } if backImageIncluded { - if err := qb.UpdateBackImage(ctx, movie.ID, backimageData); err != nil { + if err := qb.UpdateBackImage(ctx, group.ID, backimageData); err != nil { return err } } @@ -181,12 +199,14 @@ func (r *mutationResolver) MovieUpdate(ctx context.Context, input MovieUpdateInp return nil, err } - r.hookExecutor.ExecutePostHooks(ctx, movie.ID, hook.MovieUpdatePost, input, translator.getFields()) - return r.getMovie(ctx, movie.ID) + // for backwards compatibility - run both movie and group hooks + r.hookExecutor.ExecutePostHooks(ctx, group.ID, hook.GroupUpdatePost, input, translator.getFields()) + r.hookExecutor.ExecutePostHooks(ctx, group.ID, hook.MovieUpdatePost, input, translator.getFields()) + return r.getGroup(ctx, group.ID) } -func (r *mutationResolver) BulkMovieUpdate(ctx context.Context, input BulkMovieUpdateInput) ([]*models.Movie, error) { - movieIDs, err := stringslice.StringSliceToIntSlice(input.Ids) +func (r *mutationResolver) BulkMovieUpdate(ctx context.Context, input BulkMovieUpdateInput) ([]*models.Group, error) { + groupIDs, err := stringslice.StringSliceToIntSlice(input.Ids) if err != nil { return nil, fmt.Errorf("converting ids: %w", err) } @@ -195,29 +215,36 @@ func (r *mutationResolver) BulkMovieUpdate(ctx context.Context, input BulkMovieU inputMap: getUpdateInputMap(ctx), } - // Populate movie from the input - 
updatedMovie := models.NewMoviePartial() + // Populate group from the input + updatedGroup := models.NewGroupPartial() - updatedMovie.Rating = translator.optionalInt(input.Rating100, "rating100") - updatedMovie.Director = translator.optionalString(input.Director, "director") + updatedGroup.Rating = translator.optionalInt(input.Rating100, "rating100") + updatedGroup.Director = translator.optionalString(input.Director, "director") - updatedMovie.StudioID, err = translator.optionalIntFromString(input.StudioID, "studio_id") + updatedGroup.StudioID, err = translator.optionalIntFromString(input.StudioID, "studio_id") if err != nil { return nil, fmt.Errorf("converting studio id: %w", err) } - ret := []*models.Movie{} + updatedGroup.TagIDs, err = translator.updateIdsBulk(input.TagIds, "tag_ids") + if err != nil { + return nil, fmt.Errorf("converting tag ids: %w", err) + } + + updatedGroup.URLs = translator.optionalURLsBulk(input.Urls, nil) + + ret := []*models.Group{} if err := r.withTxn(ctx, func(ctx context.Context) error { - qb := r.repository.Movie + qb := r.repository.Group - for _, movieID := range movieIDs { - movie, err := qb.UpdatePartial(ctx, movieID, updatedMovie) + for _, groupID := range groupIDs { + group, err := qb.UpdatePartial(ctx, groupID, updatedGroup) if err != nil { return err } - ret = append(ret, movie) + ret = append(ret, group) } return nil @@ -225,16 +252,18 @@ func (r *mutationResolver) BulkMovieUpdate(ctx context.Context, input BulkMovieU return nil, err } - var newRet []*models.Movie - for _, movie := range ret { - r.hookExecutor.ExecutePostHooks(ctx, movie.ID, hook.MovieUpdatePost, input, translator.getFields()) + var newRet []*models.Group + for _, group := range ret { + // for backwards compatibility - run both movie and group hooks + r.hookExecutor.ExecutePostHooks(ctx, group.ID, hook.GroupUpdatePost, input, translator.getFields()) + r.hookExecutor.ExecutePostHooks(ctx, group.ID, hook.MovieUpdatePost, input, translator.getFields()) - movie, 
err = r.getMovie(ctx, movie.ID) + group, err = r.getGroup(ctx, group.ID) if err != nil { return nil, err } - newRet = append(newRet, movie) + newRet = append(newRet, group) } return newRet, nil @@ -247,24 +276,26 @@ func (r *mutationResolver) MovieDestroy(ctx context.Context, input MovieDestroyI } if err := r.withTxn(ctx, func(ctx context.Context) error { - return r.repository.Movie.Destroy(ctx, id) + return r.repository.Group.Destroy(ctx, id) }); err != nil { return false, err } + // for backwards compatibility - run both movie and group hooks + r.hookExecutor.ExecutePostHooks(ctx, id, hook.GroupDestroyPost, input, nil) r.hookExecutor.ExecutePostHooks(ctx, id, hook.MovieDestroyPost, input, nil) return true, nil } -func (r *mutationResolver) MoviesDestroy(ctx context.Context, movieIDs []string) (bool, error) { - ids, err := stringslice.StringSliceToIntSlice(movieIDs) +func (r *mutationResolver) MoviesDestroy(ctx context.Context, groupIDs []string) (bool, error) { + ids, err := stringslice.StringSliceToIntSlice(groupIDs) if err != nil { return false, fmt.Errorf("converting ids: %w", err) } if err := r.withTxn(ctx, func(ctx context.Context) error { - qb := r.repository.Movie + qb := r.repository.Group for _, id := range ids { if err := qb.Destroy(ctx, id); err != nil { return err @@ -277,7 +308,9 @@ func (r *mutationResolver) MoviesDestroy(ctx context.Context, movieIDs []string) } for _, id := range ids { - r.hookExecutor.ExecutePostHooks(ctx, id, hook.MovieDestroyPost, movieIDs, nil) + // for backwards compatibility - run both movie and group hooks + r.hookExecutor.ExecutePostHooks(ctx, id, hook.GroupDestroyPost, groupIDs, nil) + r.hookExecutor.ExecutePostHooks(ctx, id, hook.MovieDestroyPost, groupIDs, nil) } return true, nil diff --git a/internal/api/resolver_mutation_performer.go b/internal/api/resolver_mutation_performer.go index 202778e74d5..7263cc70966 100644 --- a/internal/api/resolver_mutation_performer.go +++ b/internal/api/resolver_mutation_performer.go @@ 
-12,6 +12,11 @@ import ( "github.com/stashapp/stash/pkg/utils" ) +const ( + twitterURL = "https://twitter.com" + instagramURL = "https://instagram.com" +) + // used to refetch performer after hooks run func (r *mutationResolver) getPerformer(ctx context.Context, id int) (ret *models.Performer, err error) { if err := r.withTxn(ctx, func(ctx context.Context) error { @@ -35,7 +40,6 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per newPerformer.Name = input.Name newPerformer.Disambiguation = translator.string(input.Disambiguation) newPerformer.Aliases = models.NewRelatedStrings(input.AliasList) - newPerformer.URL = translator.string(input.URL) newPerformer.Gender = input.Gender newPerformer.Ethnicity = translator.string(input.Ethnicity) newPerformer.Country = translator.string(input.Country) @@ -47,8 +51,6 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per newPerformer.CareerLength = translator.string(input.CareerLength) newPerformer.Tattoos = translator.string(input.Tattoos) newPerformer.Piercings = translator.string(input.Piercings) - newPerformer.Twitter = translator.string(input.Twitter) - newPerformer.Instagram = translator.string(input.Instagram) newPerformer.Favorite = translator.bool(input.Favorite) newPerformer.Rating = input.Rating100 newPerformer.Details = translator.string(input.Details) @@ -58,6 +60,21 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per newPerformer.IgnoreAutoTag = translator.bool(input.IgnoreAutoTag) newPerformer.StashIDs = models.NewRelatedStashIDs(input.StashIds) + newPerformer.URLs = models.NewRelatedStrings([]string{}) + if input.URL != nil { + newPerformer.URLs.Add(*input.URL) + } + if input.Twitter != nil { + newPerformer.URLs.Add(utils.URLFromHandle(*input.Twitter, twitterURL)) + } + if input.Instagram != nil { + newPerformer.URLs.Add(utils.URLFromHandle(*input.Instagram, instagramURL)) + } + + if input.Urls != nil { + 
newPerformer.URLs.Add(input.Urls...) + } + var err error newPerformer.Birthdate, err = translator.datePtr(input.Birthdate) @@ -112,6 +129,96 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per return r.getPerformer(ctx, newPerformer.ID) } +func (r *mutationResolver) validateNoLegacyURLs(translator changesetTranslator) error { + // ensure url/twitter/instagram are not included in the input + if translator.hasField("url") { + return fmt.Errorf("url field must not be included if urls is included") + } + if translator.hasField("twitter") { + return fmt.Errorf("twitter field must not be included if urls is included") + } + if translator.hasField("instagram") { + return fmt.Errorf("instagram field must not be included if urls is included") + } + + return nil +} + +func (r *mutationResolver) handleLegacyURLs(ctx context.Context, performerID int, legacyURL, legacyTwitter, legacyInstagram models.OptionalString, updatedPerformer *models.PerformerPartial) error { + qb := r.repository.Performer + + // we need to be careful with URL/Twitter/Instagram + // treat URL as replacing the first non-Twitter/Instagram URL in the list + // twitter should replace any existing twitter URL + // instagram should replace any existing instagram URL + p, err := qb.Find(ctx, performerID) + if err != nil { + return err + } + + if err := p.LoadURLs(ctx, qb); err != nil { + return fmt.Errorf("loading performer URLs: %w", err) + } + + existingURLs := p.URLs.List() + + // performer partial URLs should be empty + if legacyURL.Set { + replaced := false + for i, url := range existingURLs { + if !performer.IsTwitterURL(url) && !performer.IsInstagramURL(url) { + existingURLs[i] = legacyURL.Value + replaced = true + break + } + } + + if !replaced { + existingURLs = append(existingURLs, legacyURL.Value) + } + } + + if legacyTwitter.Set { + value := utils.URLFromHandle(legacyTwitter.Value, twitterURL) + found := false + // find and replace the first twitter URL + for i, url := 
range existingURLs { + if performer.IsTwitterURL(url) { + existingURLs[i] = value + found = true + break + } + } + + if !found { + existingURLs = append(existingURLs, value) + } + } + if legacyInstagram.Set { + found := false + value := utils.URLFromHandle(legacyInstagram.Value, instagramURL) + // find and replace the first instagram URL + for i, url := range existingURLs { + if performer.IsInstagramURL(url) { + existingURLs[i] = value + found = true + break + } + } + + if !found { + existingURLs = append(existingURLs, value) + } + } + + updatedPerformer.URLs = &models.UpdateStrings{ + Values: existingURLs, + Mode: models.RelationshipUpdateModeSet, + } + + return nil +} + func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.PerformerUpdateInput) (*models.Performer, error) { performerID, err := strconv.Atoi(input.ID) if err != nil { @@ -127,7 +234,6 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per updatedPerformer.Name = translator.optionalString(input.Name, "name") updatedPerformer.Disambiguation = translator.optionalString(input.Disambiguation, "disambiguation") - updatedPerformer.URL = translator.optionalString(input.URL, "url") updatedPerformer.Gender = translator.optionalString((*string)(input.Gender), "gender") updatedPerformer.Ethnicity = translator.optionalString(input.Ethnicity, "ethnicity") updatedPerformer.Country = translator.optionalString(input.Country, "country") @@ -139,8 +245,6 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per updatedPerformer.CareerLength = translator.optionalString(input.CareerLength, "career_length") updatedPerformer.Tattoos = translator.optionalString(input.Tattoos, "tattoos") updatedPerformer.Piercings = translator.optionalString(input.Piercings, "piercings") - updatedPerformer.Twitter = translator.optionalString(input.Twitter, "twitter") - updatedPerformer.Instagram = translator.optionalString(input.Instagram, "instagram") 
updatedPerformer.Favorite = translator.optionalBool(input.Favorite, "favorite") updatedPerformer.Rating = translator.optionalInt(input.Rating100, "rating100") updatedPerformer.Details = translator.optionalString(input.Details, "details") @@ -149,6 +253,19 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per updatedPerformer.IgnoreAutoTag = translator.optionalBool(input.IgnoreAutoTag, "ignore_auto_tag") updatedPerformer.StashIDs = translator.updateStashIDs(input.StashIds, "stash_ids") + if translator.hasField("urls") { + // ensure url/twitter/instagram are not included in the input + if err := r.validateNoLegacyURLs(translator); err != nil { + return nil, err + } + + updatedPerformer.URLs = translator.updateStrings(input.Urls, "urls") + } + + legacyURL := translator.optionalString(input.URL, "url") + legacyTwitter := translator.optionalString(input.Twitter, "twitter") + legacyInstagram := translator.optionalString(input.Instagram, "instagram") + updatedPerformer.Birthdate, err = translator.optionalDate(input.Birthdate, "birthdate") if err != nil { return nil, fmt.Errorf("converting birthdate: %w", err) @@ -186,6 +303,12 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per if err := r.withTxn(ctx, func(ctx context.Context) error { qb := r.repository.Performer + if legacyURL.Set || legacyTwitter.Set || legacyInstagram.Set { + if err := r.handleLegacyURLs(ctx, performerID, legacyURL, legacyTwitter, legacyInstagram, &updatedPerformer); err != nil { + return err + } + } + if err := performer.ValidateUpdate(ctx, performerID, updatedPerformer, qb); err != nil { return err } @@ -225,7 +348,7 @@ func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input BulkPe updatedPerformer := models.NewPerformerPartial() updatedPerformer.Disambiguation = translator.optionalString(input.Disambiguation, "disambiguation") - updatedPerformer.URL = translator.optionalString(input.URL, "url") + updatedPerformer.Gender 
= translator.optionalString((*string)(input.Gender), "gender") updatedPerformer.Ethnicity = translator.optionalString(input.Ethnicity, "ethnicity") updatedPerformer.Country = translator.optionalString(input.Country, "country") @@ -237,8 +360,7 @@ func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input BulkPe updatedPerformer.CareerLength = translator.optionalString(input.CareerLength, "career_length") updatedPerformer.Tattoos = translator.optionalString(input.Tattoos, "tattoos") updatedPerformer.Piercings = translator.optionalString(input.Piercings, "piercings") - updatedPerformer.Twitter = translator.optionalString(input.Twitter, "twitter") - updatedPerformer.Instagram = translator.optionalString(input.Instagram, "instagram") + updatedPerformer.Favorite = translator.optionalBool(input.Favorite, "favorite") updatedPerformer.Rating = translator.optionalInt(input.Rating100, "rating100") updatedPerformer.Details = translator.optionalString(input.Details, "details") @@ -246,6 +368,19 @@ func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input BulkPe updatedPerformer.Weight = translator.optionalInt(input.Weight, "weight") updatedPerformer.IgnoreAutoTag = translator.optionalBool(input.IgnoreAutoTag, "ignore_auto_tag") + if translator.hasField("urls") { + // ensure url/twitter/instagram are not included in the input + if err := r.validateNoLegacyURLs(translator); err != nil { + return nil, err + } + + updatedPerformer.URLs = translator.updateStringsBulk(input.Urls, "urls") + } + + legacyURL := translator.optionalString(input.URL, "url") + legacyTwitter := translator.optionalString(input.Twitter, "twitter") + legacyInstagram := translator.optionalString(input.Instagram, "instagram") + updatedPerformer.Birthdate, err = translator.optionalDate(input.Birthdate, "birthdate") if err != nil { return nil, fmt.Errorf("converting birthdate: %w", err) @@ -277,6 +412,12 @@ func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input 
BulkPe qb := r.repository.Performer for _, performerID := range performerIDs { + if legacyURL.Set || legacyTwitter.Set || legacyInstagram.Set { + if err := r.handleLegacyURLs(ctx, performerID, legacyURL, legacyTwitter, legacyInstagram, &updatedPerformer); err != nil { + return err + } + } + if err := performer.ValidateUpdate(ctx, performerID, updatedPerformer, qb); err != nil { return err } diff --git a/internal/api/resolver_mutation_saved_filter.go b/internal/api/resolver_mutation_saved_filter.go index 13b5d87fafa..e49c1214cff 100644 --- a/internal/api/resolver_mutation_saved_filter.go +++ b/internal/api/resolver_mutation_saved_filter.go @@ -7,7 +7,10 @@ import ( "strconv" "strings" + "github.com/mitchellh/mapstructure" + "github.com/stashapp/stash/internal/manager/config" "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/utils" ) func (r *mutationResolver) SaveFilter(ctx context.Context, input SaveFilterInput) (ret *models.SavedFilter, err error) { @@ -67,30 +70,48 @@ func (r *mutationResolver) DestroySavedFilter(ctx context.Context, input Destroy } func (r *mutationResolver) SetDefaultFilter(ctx context.Context, input SetDefaultFilterInput) (bool, error) { - if err := r.withTxn(ctx, func(ctx context.Context) error { - qb := r.repository.SavedFilter + // deprecated - write to the config in the meantime + config := config.GetInstance() + + uiConfig := config.GetUIConfiguration() + if uiConfig == nil { + uiConfig = make(map[string]interface{}) + } - if input.FindFilter == nil && input.ObjectFilter == nil && input.UIOptions == nil { - // clearing - def, err := qb.FindDefault(ctx, input.Mode) - if err != nil { - return err - } + m := utils.NestedMap(uiConfig) - if def != nil { - return qb.Destroy(ctx, def.ID) - } + if input.FindFilter == nil && input.ObjectFilter == nil && input.UIOptions == nil { + // clearing + m.Delete("defaultFilters." 
+ strings.ToLower(input.Mode.String())) + config.SetUIConfiguration(m) - return nil + if err := config.Write(); err != nil { + return false, err } - return qb.SetDefault(ctx, &models.SavedFilter{ - Mode: input.Mode, - FindFilter: input.FindFilter, - ObjectFilter: input.ObjectFilter, - UIOptions: input.UIOptions, - }) - }); err != nil { + return true, nil + } + + subMap := make(map[string]interface{}) + d, err := mapstructure.NewDecoder(&mapstructure.DecoderConfig{ + TagName: "json", + WeaklyTypedInput: true, + Result: &subMap, + }) + + if err != nil { + return false, err + } + + if err := d.Decode(input); err != nil { + return false, err + } + + m.Set("defaultFilters."+strings.ToLower(input.Mode.String()), subMap) + + config.SetUIConfiguration(m) + + if err := config.Write(); err != nil { return false, err } diff --git a/internal/api/resolver_mutation_scene.go b/internal/api/resolver_mutation_scene.go index 15bf4514716..ca99dafc150 100644 --- a/internal/api/resolver_mutation_scene.go +++ b/internal/api/resolver_mutation_scene.go @@ -80,9 +80,17 @@ func (r *mutationResolver) SceneCreate(ctx context.Context, input models.SceneCr return nil, fmt.Errorf("converting gallery ids: %w", err) } - newScene.Movies, err = translator.relatedMovies(input.Movies) - if err != nil { - return nil, fmt.Errorf("converting movies: %w", err) + // prefer groups over movies + if len(input.Groups) > 0 { + newScene.Groups, err = translator.relatedGroups(input.Groups) + if err != nil { + return nil, fmt.Errorf("converting groups: %w", err) + } + } else if len(input.Movies) > 0 { + newScene.Groups, err = translator.relatedGroupsFromMovies(input.Movies) + if err != nil { + return nil, fmt.Errorf("converting movies: %w", err) + } } var coverImageData []byte @@ -216,9 +224,16 @@ func scenePartialFromInput(input models.SceneUpdateInput, translator changesetTr return nil, fmt.Errorf("converting gallery ids: %w", err) } - updatedScene.MovieIDs, err = translator.updateMovieIDs(input.Movies, 
"movies") - if err != nil { - return nil, fmt.Errorf("converting movies: %w", err) + if translator.hasField("groups") { + updatedScene.GroupIDs, err = translator.updateGroupIDs(input.Groups, "groups") + if err != nil { + return nil, fmt.Errorf("converting groups: %w", err) + } + } else if translator.hasField("movies") { + updatedScene.GroupIDs, err = translator.updateGroupIDsFromMovies(input.Movies, "movies") + if err != nil { + return nil, fmt.Errorf("converting movies: %w", err) + } } return &updatedScene, nil @@ -358,9 +373,16 @@ func (r *mutationResolver) BulkSceneUpdate(ctx context.Context, input BulkSceneU return nil, fmt.Errorf("converting gallery ids: %w", err) } - updatedScene.MovieIDs, err = translator.updateMovieIDsBulk(input.MovieIds, "movie_ids") - if err != nil { - return nil, fmt.Errorf("converting movie ids: %w", err) + if translator.hasField("group_ids") { + updatedScene.GroupIDs, err = translator.updateGroupIDsBulk(input.GroupIds, "group_ids") + if err != nil { + return nil, fmt.Errorf("converting group ids: %w", err) + } + } else if translator.hasField("movie_ids") { + updatedScene.GroupIDs, err = translator.updateGroupIDsBulk(input.MovieIds, "movie_ids") + if err != nil { + return nil, fmt.Errorf("converting movie ids: %w", err) + } } ret := []*models.Scene{} @@ -825,6 +847,24 @@ func (r *mutationResolver) SceneSaveActivity(ctx context.Context, id string, res return ret, nil } +func (r *mutationResolver) SceneResetActivity(ctx context.Context, id string, resetResume *bool, resetDuration *bool) (ret bool, err error) { + sceneID, err := strconv.Atoi(id) + if err != nil { + return false, fmt.Errorf("converting id: %w", err) + } + + if err := r.withTxn(ctx, func(ctx context.Context) error { + qb := r.repository.Scene + + ret, err = qb.ResetActivity(ctx, sceneID, utils.IsTrue(resetResume), utils.IsTrue(resetDuration)) + return err + }); err != nil { + return false, err + } + + return ret, nil +} + // deprecated func (r *mutationResolver) 
SceneIncrementPlayCount(ctx context.Context, id string) (ret int, err error) { sceneID, err := strconv.Atoi(id) diff --git a/internal/api/resolver_mutation_stash_box.go b/internal/api/resolver_mutation_stash_box.go index 2198ab6ff4a..b853df65e49 100644 --- a/internal/api/resolver_mutation_stash_box.go +++ b/internal/api/resolver_mutation_stash_box.go @@ -6,41 +6,46 @@ import ( "strconv" "github.com/stashapp/stash/internal/manager" - "github.com/stashapp/stash/internal/manager/config" "github.com/stashapp/stash/pkg/logger" - "github.com/stashapp/stash/pkg/scraper/stashbox" ) func (r *mutationResolver) SubmitStashBoxFingerprints(ctx context.Context, input StashBoxFingerprintSubmissionInput) (bool, error) { - boxes := config.GetInstance().GetStashBoxes() - - if input.StashBoxIndex < 0 || input.StashBoxIndex >= len(boxes) { - return false, fmt.Errorf("invalid stash_box_index %d", input.StashBoxIndex) + b, err := resolveStashBox(input.StashBoxIndex, input.StashBoxEndpoint) + if err != nil { + return false, err } - client := stashbox.NewClient(*boxes[input.StashBoxIndex], r.stashboxRepository()) - - return client.SubmitStashBoxFingerprints(ctx, input.SceneIds, boxes[input.StashBoxIndex].Endpoint) + client := r.newStashBoxClient(*b) + return client.SubmitStashBoxFingerprints(ctx, input.SceneIds) } func (r *mutationResolver) StashBoxBatchPerformerTag(ctx context.Context, input manager.StashBoxBatchTagInput) (string, error) { - jobID := manager.GetInstance().StashBoxBatchPerformerTag(ctx, input) + b, err := resolveStashBoxBatchTagInput(input.Endpoint, input.StashBoxEndpoint) + if err != nil { + return "", err + } + + jobID := manager.GetInstance().StashBoxBatchPerformerTag(ctx, b, input) return strconv.Itoa(jobID), nil } func (r *mutationResolver) StashBoxBatchStudioTag(ctx context.Context, input manager.StashBoxBatchTagInput) (string, error) { - jobID := manager.GetInstance().StashBoxBatchStudioTag(ctx, input) + b, err := resolveStashBoxBatchTagInput(input.Endpoint, 
input.StashBoxEndpoint) + if err != nil { + return "", err + } + + jobID := manager.GetInstance().StashBoxBatchStudioTag(ctx, b, input) return strconv.Itoa(jobID), nil } func (r *mutationResolver) SubmitStashBoxSceneDraft(ctx context.Context, input StashBoxDraftSubmissionInput) (*string, error) { - boxes := config.GetInstance().GetStashBoxes() - - if input.StashBoxIndex < 0 || input.StashBoxIndex >= len(boxes) { - return nil, fmt.Errorf("invalid stash_box_index %d", input.StashBoxIndex) + b, err := resolveStashBox(input.StashBoxIndex, input.StashBoxEndpoint) + if err != nil { + return nil, err } - client := stashbox.NewClient(*boxes[input.StashBoxIndex], r.stashboxRepository()) + client := r.newStashBoxClient(*b) id, err := strconv.Atoi(input.ID) if err != nil { @@ -68,7 +73,7 @@ func (r *mutationResolver) SubmitStashBoxSceneDraft(ctx context.Context, input S return fmt.Errorf("loading scene URLs: %w", err) } - res, err = client.SubmitSceneDraft(ctx, scene, boxes[input.StashBoxIndex].Endpoint, cover) + res, err = client.SubmitSceneDraft(ctx, scene, cover) return err }) @@ -76,13 +81,12 @@ func (r *mutationResolver) SubmitStashBoxSceneDraft(ctx context.Context, input S } func (r *mutationResolver) SubmitStashBoxPerformerDraft(ctx context.Context, input StashBoxDraftSubmissionInput) (*string, error) { - boxes := config.GetInstance().GetStashBoxes() - - if input.StashBoxIndex < 0 || input.StashBoxIndex >= len(boxes) { - return nil, fmt.Errorf("invalid stash_box_index %d", input.StashBoxIndex) + b, err := resolveStashBox(input.StashBoxIndex, input.StashBoxEndpoint) + if err != nil { + return nil, err } - client := stashbox.NewClient(*boxes[input.StashBoxIndex], r.stashboxRepository()) + client := r.newStashBoxClient(*b) id, err := strconv.Atoi(input.ID) if err != nil { @@ -101,7 +105,7 @@ func (r *mutationResolver) SubmitStashBoxPerformerDraft(ctx context.Context, inp return fmt.Errorf("performer with id %d not found", id) } - res, err = 
client.SubmitPerformerDraft(ctx, performer, boxes[input.StashBoxIndex].Endpoint) + res, err = client.SubmitPerformerDraft(ctx, performer) return err }) diff --git a/internal/api/resolver_mutation_studio.go b/internal/api/resolver_mutation_studio.go index 05d84a97940..a33e5d9b676 100644 --- a/internal/api/resolver_mutation_studio.go +++ b/internal/api/resolver_mutation_studio.go @@ -48,6 +48,11 @@ func (r *mutationResolver) StudioCreate(ctx context.Context, input models.Studio return nil, fmt.Errorf("converting parent id: %w", err) } + newStudio.TagIDs, err = translator.relatedIds(input.TagIds) + if err != nil { + return nil, fmt.Errorf("converting tag ids: %w", err) + } + // Process the base 64 encoded image string var imageData []byte if input.Image != nil { @@ -114,6 +119,11 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.Studio return nil, fmt.Errorf("converting parent id: %w", err) } + updatedStudio.TagIDs, err = translator.updateIds(input.TagIds, "tag_ids") + if err != nil { + return nil, fmt.Errorf("converting tag ids: %w", err) + } + // Process the base 64 encoded image string var imageData []byte imageIncluded := translator.hasField("image") diff --git a/internal/api/resolver_mutation_tag.go b/internal/api/resolver_mutation_tag.go index 2c3128c58d4..2554f1bb55f 100644 --- a/internal/api/resolver_mutation_tag.go +++ b/internal/api/resolver_mutation_tag.go @@ -33,26 +33,21 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input TagCreateInput) newTag := models.NewTag() newTag.Name = input.Name + newTag.Aliases = models.NewRelatedStrings(input.Aliases) newTag.Favorite = translator.bool(input.Favorite) newTag.Description = translator.string(input.Description) newTag.IgnoreAutoTag = translator.bool(input.IgnoreAutoTag) var err error - var parentIDs []int - if len(input.ParentIds) > 0 { - parentIDs, err = stringslice.StringSliceToIntSlice(input.ParentIds) - if err != nil { - return nil, fmt.Errorf("converting parent ids: 
%w", err) - } + newTag.ParentIDs, err = translator.relatedIds(input.ParentIds) + if err != nil { + return nil, fmt.Errorf("converting parent tag ids: %w", err) } - var childIDs []int - if len(input.ChildIds) > 0 { - childIDs, err = stringslice.StringSliceToIntSlice(input.ChildIds) - if err != nil { - return nil, fmt.Errorf("converting child ids: %w", err) - } + newTag.ChildIDs, err = translator.relatedIds(input.ChildIds) + if err != nil { + return nil, fmt.Errorf("converting child tag ids: %w", err) } // Process the base 64 encoded image string @@ -68,8 +63,7 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input TagCreateInput) if err := r.withTxn(ctx, func(ctx context.Context) error { qb := r.repository.Tag - // ensure name is unique - if err := tag.EnsureTagNameUnique(ctx, 0, newTag.Name, qb); err != nil { + if err := tag.ValidateCreate(ctx, newTag, qb); err != nil { return err } @@ -85,36 +79,6 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input TagCreateInput) } } - if len(input.Aliases) > 0 { - if err := tag.EnsureAliasesUnique(ctx, newTag.ID, input.Aliases, qb); err != nil { - return err - } - - if err := qb.UpdateAliases(ctx, newTag.ID, input.Aliases); err != nil { - return err - } - } - - if len(parentIDs) > 0 { - if err := qb.UpdateParentTags(ctx, newTag.ID, parentIDs); err != nil { - return err - } - } - - if len(childIDs) > 0 { - if err := qb.UpdateChildTags(ctx, newTag.ID, childIDs); err != nil { - return err - } - } - - // FIXME: This should be called before any changes are made, but - // requires a rewrite of ValidateHierarchy. 
- if len(parentIDs) > 0 || len(childIDs) > 0 { - if err := tag.ValidateHierarchy(ctx, &newTag, parentIDs, childIDs, qb); err != nil { - return err - } - } - return nil }); err != nil { return nil, err @@ -137,24 +101,21 @@ func (r *mutationResolver) TagUpdate(ctx context.Context, input TagUpdateInput) // Populate tag from the input updatedTag := models.NewTagPartial() + updatedTag.Name = translator.optionalString(input.Name, "name") updatedTag.Favorite = translator.optionalBool(input.Favorite, "favorite") updatedTag.IgnoreAutoTag = translator.optionalBool(input.IgnoreAutoTag, "ignore_auto_tag") updatedTag.Description = translator.optionalString(input.Description, "description") - var parentIDs []int - if translator.hasField("parent_ids") { - parentIDs, err = stringslice.StringSliceToIntSlice(input.ParentIds) - if err != nil { - return nil, fmt.Errorf("converting parent ids: %w", err) - } + updatedTag.Aliases = translator.updateStrings(input.Aliases, "aliases") + + updatedTag.ParentIDs, err = translator.updateIds(input.ParentIds, "parent_ids") + if err != nil { + return nil, fmt.Errorf("converting parent tag ids: %w", err) } - var childIDs []int - if translator.hasField("child_ids") { - childIDs, err = stringslice.StringSliceToIntSlice(input.ChildIds) - if err != nil { - return nil, fmt.Errorf("converting child ids: %w", err) - } + updatedTag.ChildIDs, err = translator.updateIds(input.ChildIds, "child_ids") + if err != nil { + return nil, fmt.Errorf("converting child tag ids: %w", err) } var imageData []byte @@ -171,24 +132,10 @@ func (r *mutationResolver) TagUpdate(ctx context.Context, input TagUpdateInput) if err := r.withTxn(ctx, func(ctx context.Context) error { qb := r.repository.Tag - // ensure name is unique - t, err = qb.Find(ctx, tagID) - if err != nil { + if err := tag.ValidateUpdate(ctx, tagID, updatedTag, qb); err != nil { return err } - if t == nil { - return fmt.Errorf("tag with id %d not found", tagID) - } - - if input.Name != nil && t.Name != 
*input.Name { - if err := tag.EnsureTagNameUnique(ctx, tagID, *input.Name, qb); err != nil { - return err - } - - updatedTag.Name = models.NewOptionalString(*input.Name) - } - t, err = qb.UpdatePartial(ctx, tagID, updatedTag) if err != nil { return err @@ -201,35 +148,61 @@ func (r *mutationResolver) TagUpdate(ctx context.Context, input TagUpdateInput) } } - if translator.hasField("aliases") { - if err := tag.EnsureAliasesUnique(ctx, tagID, input.Aliases, qb); err != nil { - return err - } + return nil + }); err != nil { + return nil, err + } - if err := qb.UpdateAliases(ctx, tagID, input.Aliases); err != nil { - return err - } - } + r.hookExecutor.ExecutePostHooks(ctx, t.ID, hook.TagUpdatePost, input, translator.getFields()) + return r.getTag(ctx, t.ID) +} - if parentIDs != nil { - if err := qb.UpdateParentTags(ctx, tagID, parentIDs); err != nil { - return err - } - } +func (r *mutationResolver) BulkTagUpdate(ctx context.Context, input BulkTagUpdateInput) ([]*models.Tag, error) { + tagIDs, err := stringslice.StringSliceToIntSlice(input.Ids) + if err != nil { + return nil, fmt.Errorf("converting ids: %w", err) + } + + translator := changesetTranslator{ + inputMap: getUpdateInputMap(ctx), + } + + // Populate scene from the input + updatedTag := models.NewTagPartial() + + updatedTag.Description = translator.optionalString(input.Description, "description") + updatedTag.Favorite = translator.optionalBool(input.Favorite, "favorite") + updatedTag.IgnoreAutoTag = translator.optionalBool(input.IgnoreAutoTag, "ignore_auto_tag") + + updatedTag.Aliases = translator.updateStringsBulk(input.Aliases, "aliases") + + updatedTag.ParentIDs, err = translator.updateIdsBulk(input.ParentIds, "parent_ids") + if err != nil { + return nil, fmt.Errorf("converting parent tag ids: %w", err) + } + + updatedTag.ChildIDs, err = translator.updateIdsBulk(input.ChildIds, "child_ids") + if err != nil { + return nil, fmt.Errorf("converting child tag ids: %w", err) + } + + ret := []*models.Tag{} + + 
// Start the transaction and save the scenes + if err := r.withTxn(ctx, func(ctx context.Context) error { + qb := r.repository.Tag - if childIDs != nil { - if err := qb.UpdateChildTags(ctx, tagID, childIDs); err != nil { + for _, tagID := range tagIDs { + if err := tag.ValidateUpdate(ctx, tagID, updatedTag, qb); err != nil { return err } - } - // FIXME: This should be called before any changes are made, but - // requires a rewrite of ValidateHierarchy. - if parentIDs != nil || childIDs != nil { - if err := tag.ValidateHierarchy(ctx, t, parentIDs, childIDs, qb); err != nil { - logger.Errorf("Error saving tag: %s", err) + tag, err := qb.UpdatePartial(ctx, tagID, updatedTag) + if err != nil { return err } + + ret = append(ret, tag) } return nil @@ -237,8 +210,20 @@ func (r *mutationResolver) TagUpdate(ctx context.Context, input TagUpdateInput) return nil, err } - r.hookExecutor.ExecutePostHooks(ctx, t.ID, hook.TagUpdatePost, input, translator.getFields()) - return r.getTag(ctx, t.ID) + // execute post hooks outside of txn + var newRet []*models.Tag + for _, tag := range ret { + r.hookExecutor.ExecutePostHooks(ctx, tag.ID, hook.TagUpdatePost, input, translator.getFields()) + + tag, err = r.getTag(ctx, tag.ID) + if err != nil { + return nil, err + } + + newRet = append(newRet, tag) + } + + return newRet, nil } func (r *mutationResolver) TagDestroy(ctx context.Context, input TagDestroyInput) (bool, error) { @@ -331,7 +316,7 @@ func (r *mutationResolver) TagsMerge(ctx context.Context, input TagsMergeInput) return err } - err = tag.ValidateHierarchy(ctx, t, parents, children, qb) + err = tag.ValidateHierarchyExisting(ctx, t, parents, children, qb) if err != nil { logger.Errorf("Error merging tag: %s", err) return err diff --git a/internal/api/resolver_query_find_group.go b/internal/api/resolver_query_find_group.go new file mode 100644 index 00000000000..6f8a6c6bac4 --- /dev/null +++ b/internal/api/resolver_query_find_group.go @@ -0,0 +1,59 @@ +package api + +import ( + 
"context" + "strconv" + + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/sliceutil/stringslice" +) + +func (r *queryResolver) FindGroup(ctx context.Context, id string) (ret *models.Group, err error) { + idInt, err := strconv.Atoi(id) + if err != nil { + return nil, err + } + + if err := r.withReadTxn(ctx, func(ctx context.Context) error { + ret, err = r.repository.Group.Find(ctx, idInt) + return err + }); err != nil { + return nil, err + } + + return ret, nil +} + +func (r *queryResolver) FindGroups(ctx context.Context, groupFilter *models.GroupFilterType, filter *models.FindFilterType, ids []string) (ret *FindGroupsResultType, err error) { + idInts, err := stringslice.StringSliceToIntSlice(ids) + if err != nil { + return nil, err + } + + if err := r.withReadTxn(ctx, func(ctx context.Context) error { + var groups []*models.Group + var err error + var total int + + if len(idInts) > 0 { + groups, err = r.repository.Group.FindMany(ctx, idInts) + total = len(groups) + } else { + groups, total, err = r.repository.Group.Query(ctx, groupFilter, filter) + } + + if err != nil { + return err + } + + ret = &FindGroupsResultType{ + Count: total, + Groups: groups, + } + return nil + }); err != nil { + return nil, err + } + + return ret, nil +} diff --git a/internal/api/resolver_query_find_movie.go b/internal/api/resolver_query_find_movie.go index 84e01e28276..2f80d6f5999 100644 --- a/internal/api/resolver_query_find_movie.go +++ b/internal/api/resolver_query_find_movie.go @@ -8,14 +8,14 @@ import ( "github.com/stashapp/stash/pkg/sliceutil/stringslice" ) -func (r *queryResolver) FindMovie(ctx context.Context, id string) (ret *models.Movie, err error) { +func (r *queryResolver) FindMovie(ctx context.Context, id string) (ret *models.Group, err error) { idInt, err := strconv.Atoi(id) if err != nil { return nil, err } if err := r.withReadTxn(ctx, func(ctx context.Context) error { - ret, err = r.repository.Movie.Find(ctx, idInt) + ret, err = 
r.repository.Group.Find(ctx, idInt) return err }); err != nil { return nil, err @@ -24,22 +24,22 @@ func (r *queryResolver) FindMovie(ctx context.Context, id string) (ret *models.M return ret, nil } -func (r *queryResolver) FindMovies(ctx context.Context, movieFilter *models.MovieFilterType, filter *models.FindFilterType, ids []string) (ret *FindMoviesResultType, err error) { +func (r *queryResolver) FindMovies(ctx context.Context, movieFilter *models.GroupFilterType, filter *models.FindFilterType, ids []string) (ret *FindMoviesResultType, err error) { idInts, err := stringslice.StringSliceToIntSlice(ids) if err != nil { return nil, err } if err := r.withReadTxn(ctx, func(ctx context.Context) error { - var movies []*models.Movie + var groups []*models.Group var err error var total int if len(idInts) > 0 { - movies, err = r.repository.Movie.FindMany(ctx, idInts) - total = len(movies) + groups, err = r.repository.Group.FindMany(ctx, idInts) + total = len(groups) } else { - movies, total, err = r.repository.Movie.Query(ctx, movieFilter, filter) + groups, total, err = r.repository.Group.Query(ctx, movieFilter, filter) } if err != nil { @@ -48,7 +48,7 @@ func (r *queryResolver) FindMovies(ctx context.Context, movieFilter *models.Movi ret = &FindMoviesResultType{ Count: total, - Movies: movies, + Movies: groups, } return nil }); err != nil { @@ -58,9 +58,9 @@ func (r *queryResolver) FindMovies(ctx context.Context, movieFilter *models.Movi return ret, nil } -func (r *queryResolver) AllMovies(ctx context.Context) (ret []*models.Movie, err error) { +func (r *queryResolver) AllMovies(ctx context.Context) (ret []*models.Group, err error) { if err := r.withReadTxn(ctx, func(ctx context.Context) error { - ret, err = r.repository.Movie.All(ctx) + ret, err = r.repository.Group.All(ctx) return err }); err != nil { return nil, err diff --git a/internal/api/resolver_query_find_saved_filter.go b/internal/api/resolver_query_find_saved_filter.go index 4f196fd65d8..1ba68e31d98 100644 
--- a/internal/api/resolver_query_find_saved_filter.go +++ b/internal/api/resolver_query_find_saved_filter.go @@ -3,8 +3,12 @@ package api import ( "context" "strconv" + "strings" + "github.com/mitchellh/mapstructure" + "github.com/stashapp/stash/internal/manager/config" "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/utils" ) func (r *queryResolver) FindSavedFilter(ctx context.Context, id string) (ret *models.SavedFilter, err error) { @@ -37,11 +41,35 @@ func (r *queryResolver) FindSavedFilters(ctx context.Context, mode *models.Filte } func (r *queryResolver) FindDefaultFilter(ctx context.Context, mode models.FilterMode) (ret *models.SavedFilter, err error) { - if err := r.withReadTxn(ctx, func(ctx context.Context) error { - ret, err = r.repository.SavedFilter.FindDefault(ctx, mode) - return err - }); err != nil { + // deprecated - read from the config in the meantime + config := config.GetInstance() + + uiConfig := config.GetUIConfiguration() + if uiConfig == nil { + return nil, nil + } + + m := utils.NestedMap(uiConfig) + filterRaw, _ := m.Get("defaultFilters." 
+ strings.ToLower(mode.String())) + + if filterRaw == nil { + return nil, nil + } + + ret = &models.SavedFilter{} + d, err := mapstructure.NewDecoder(&mapstructure.DecoderConfig{ + TagName: "json", + WeaklyTypedInput: true, + Result: ret, + }) + + if err != nil { return nil, err } - return ret, err + + if err := d.Decode(filterRaw); err != nil { + return nil, err + } + + return ret, nil } diff --git a/internal/api/resolver_query_scraper.go b/internal/api/resolver_query_scraper.go index 5f27db3dec1..b9690cea76c 100644 --- a/internal/api/resolver_query_scraper.go +++ b/internal/api/resolver_query_scraper.go @@ -9,7 +9,6 @@ import ( "strings" "github.com/stashapp/stash/internal/manager" - "github.com/stashapp/stash/internal/manager/config" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/scraper" @@ -145,6 +144,23 @@ func filterPerformerTags(p []*models.ScrapedPerformer) { } } +// filterGroupTags removes tags matching excluded tag patterns from the provided scraped movies +func filterGroupTags(p []*models.ScrapedMovie) { + excludeRegexps := compileRegexps(manager.GetInstance().Config.GetScraperExcludeTagPatterns()) + + var ignoredTags []string + + for _, s := range p { + var ignored []string + s.Tags, ignored = filterTags(excludeRegexps, s.Tags) + ignoredTags = sliceutil.AppendUniques(ignoredTags, ignored) + } + + if len(ignoredTags) > 0 { + logger.Debugf("Scraping ignored tags: %s", strings.Join(ignoredTags, ", ")) + } +} + func (r *queryResolver) ScrapeSceneURL(ctx context.Context, url string) (*scraper.ScrapedScene, error) { content, err := r.scraperCache().ScrapeURL(ctx, url, scraper.ScrapeContentTypeScene) if err != nil { @@ -187,20 +203,48 @@ func (r *queryResolver) ScrapeMovieURL(ctx context.Context, url string) (*models return nil, err } - return marshalScrapedMovie(content) + ret, err := marshalScrapedMovie(content) + if err != nil { + return nil, err + } + + 
filterGroupTags([]*models.ScrapedMovie{ret}) + + return ret, nil } -func (r *queryResolver) getStashBoxClient(index int) (*stashbox.Client, error) { - boxes := config.GetInstance().GetStashBoxes() +func (r *queryResolver) ScrapeGroupURL(ctx context.Context, url string) (*models.ScrapedGroup, error) { + content, err := r.scraperCache().ScrapeURL(ctx, url, scraper.ScrapeContentTypeMovie) + if err != nil { + return nil, err + } - if index < 0 || index >= len(boxes) { - return nil, fmt.Errorf("%w: invalid stash_box_index %d", ErrInput, index) + ret, err := marshalScrapedMovie(content) + if err != nil { + return nil, err } - return stashbox.NewClient(*boxes[index], r.stashboxRepository()), nil -} + filterGroupTags([]*models.ScrapedMovie{ret}) + + // convert to scraped group + group := &models.ScrapedGroup{ + StoredID: ret.StoredID, + Name: ret.Name, + Aliases: ret.Aliases, + Duration: ret.Duration, + Date: ret.Date, + Rating: ret.Rating, + Director: ret.Director, + URLs: ret.URLs, + Synopsis: ret.Synopsis, + Studio: ret.Studio, + Tags: ret.Tags, + FrontImage: ret.FrontImage, + BackImage: ret.BackImage, + } -// FIXME - in the following resolvers, we're processing the deprecated field and not processing the new endpoint input + return group, nil +} func (r *queryResolver) ScrapeSingleScene(ctx context.Context, source scraper.Source, input ScrapeSingleSceneInput) ([]*scraper.ScrapedScene, error) { var ret []*scraper.ScrapedScene @@ -245,12 +289,14 @@ func (r *queryResolver) ScrapeSingleScene(ctx context.Context, source scraper.So if err != nil { return nil, err } - case source.StashBoxIndex != nil: - client, err := r.getStashBoxClient(*source.StashBoxIndex) + case source.StashBoxIndex != nil || source.StashBoxEndpoint != nil: + b, err := resolveStashBox(source.StashBoxIndex, source.StashBoxEndpoint) if err != nil { return nil, err } + client := r.newStashBoxClient(*b) + switch { case input.SceneID != nil: ret, err = client.FindStashBoxSceneByFingerprints(ctx, sceneID) @@ 
-275,12 +321,14 @@ func (r *queryResolver) ScrapeSingleScene(ctx context.Context, source scraper.So func (r *queryResolver) ScrapeMultiScenes(ctx context.Context, source scraper.Source, input ScrapeMultiScenesInput) ([][]*scraper.ScrapedScene, error) { if source.ScraperID != nil { return nil, ErrNotImplemented - } else if source.StashBoxIndex != nil { - client, err := r.getStashBoxClient(*source.StashBoxIndex) + } else if source.StashBoxIndex != nil || source.StashBoxEndpoint != nil { + b, err := resolveStashBox(source.StashBoxIndex, source.StashBoxEndpoint) if err != nil { return nil, err } + client := r.newStashBoxClient(*b) + sceneIDs, err := stringslice.StringSliceToIntSlice(input.SceneIds) if err != nil { return nil, err @@ -293,12 +341,14 @@ func (r *queryResolver) ScrapeMultiScenes(ctx context.Context, source scraper.So } func (r *queryResolver) ScrapeSingleStudio(ctx context.Context, source scraper.Source, input ScrapeSingleStudioInput) ([]*models.ScrapedStudio, error) { - if source.StashBoxIndex != nil { - client, err := r.getStashBoxClient(*source.StashBoxIndex) + if source.StashBoxIndex != nil || source.StashBoxEndpoint != nil { + b, err := resolveStashBox(source.StashBoxIndex, source.StashBoxEndpoint) if err != nil { return nil, err } + client := r.newStashBoxClient(*b) + var ret []*models.ScrapedStudio out, err := client.FindStashBoxStudio(ctx, *input.Query) @@ -346,13 +396,14 @@ func (r *queryResolver) ScrapeSinglePerformer(ctx context.Context, source scrape default: return nil, ErrNotImplemented } - // FIXME - we're relying on a deprecated field and not processing the endpoint input - case source.StashBoxIndex != nil: - client, err := r.getStashBoxClient(*source.StashBoxIndex) + case source.StashBoxIndex != nil || source.StashBoxEndpoint != nil: + b, err := resolveStashBox(source.StashBoxIndex, source.StashBoxEndpoint) if err != nil { return nil, err } + client := r.newStashBoxClient(*b) + var res []*stashbox.StashBoxPerformerQueryResult switch { 
case input.PerformerID != nil: @@ -382,12 +433,14 @@ func (r *queryResolver) ScrapeSinglePerformer(ctx context.Context, source scrape func (r *queryResolver) ScrapeMultiPerformers(ctx context.Context, source scraper.Source, input ScrapeMultiPerformersInput) ([][]*models.ScrapedPerformer, error) { if source.ScraperID != nil { return nil, ErrNotImplemented - } else if source.StashBoxIndex != nil { - client, err := r.getStashBoxClient(*source.StashBoxIndex) + } else if source.StashBoxIndex != nil || source.StashBoxEndpoint != nil { + b, err := resolveStashBox(source.StashBoxIndex, source.StashBoxEndpoint) if err != nil { return nil, err } + client := r.newStashBoxClient(*b) + return client.FindStashBoxPerformersByPerformerNames(ctx, input.PerformerIds) } @@ -397,7 +450,7 @@ func (r *queryResolver) ScrapeMultiPerformers(ctx context.Context, source scrape func (r *queryResolver) ScrapeSingleGallery(ctx context.Context, source scraper.Source, input ScrapeSingleGalleryInput) ([]*scraper.ScrapedGallery, error) { var ret []*scraper.ScrapedGallery - if source.StashBoxIndex != nil { + if source.StashBoxIndex != nil || source.StashBoxEndpoint != nil { return nil, ErrNotSupported } @@ -441,3 +494,7 @@ func (r *queryResolver) ScrapeSingleGallery(ctx context.Context, source scraper. 
func (r *queryResolver) ScrapeSingleMovie(ctx context.Context, source scraper.Source, input ScrapeSingleMovieInput) ([]*models.ScrapedMovie, error) { return nil, ErrNotSupported } + +func (r *queryResolver) ScrapeSingleGroup(ctx context.Context, source scraper.Source, input ScrapeSingleGroupInput) ([]*models.ScrapedGroup, error) { + return nil, ErrNotSupported +} diff --git a/internal/api/routes_gallery.go b/internal/api/routes_gallery.go new file mode 100644 index 00000000000..e08663a708c --- /dev/null +++ b/internal/api/routes_gallery.go @@ -0,0 +1,159 @@ +package api + +import ( + "context" + "errors" + "net/http" + "strconv" + + "github.com/go-chi/chi/v5" + + "github.com/stashapp/stash/internal/manager/config" + "github.com/stashapp/stash/internal/static" + "github.com/stashapp/stash/pkg/image" + "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/utils" +) + +type GalleryFinder interface { + models.GalleryGetter + FindByChecksum(ctx context.Context, checksum string) ([]*models.Gallery, error) +} + +type GalleryImageFinder interface { + FindByGalleryIDIndex(ctx context.Context, galleryID int, index uint) (*models.Image, error) + image.Queryer + image.CoverQueryer +} + +type galleryRoutes struct { + routes + imageRoutes imageRoutes + galleryFinder GalleryFinder + imageFinder GalleryImageFinder + fileGetter models.FileGetter +} + +func (rs galleryRoutes) Routes() chi.Router { + r := chi.NewRouter() + + r.Route("/{galleryId}", func(r chi.Router) { + r.Use(rs.GalleryCtx) + + r.Get("/cover", rs.Cover) + r.Get("/preview/{imageIndex}", rs.Preview) + }) + + return r +} + +func (rs galleryRoutes) Cover(w http.ResponseWriter, r *http.Request) { + g := r.Context().Value(galleryKey).(*models.Gallery) + + var i *models.Image + _ = rs.withReadTxn(r, func(ctx context.Context) error { + // Find cover image first + i, _ = image.FindGalleryCover(ctx, rs.imageFinder, g.ID, config.GetInstance().GetGalleryCoverRegex()) + 
if i == nil { + return nil + } + + // serveThumbnail needs files populated + if err := i.LoadPrimaryFile(ctx, rs.fileGetter); err != nil { + if !errors.Is(err, context.Canceled) { + logger.Errorf("error loading primary file for image %d: %v", i.ID, err) + } + // set image to nil so that it doesn't try to use the primary file + i = nil + } + + return nil + }) + + if i == nil { + // fallback to default image + image := static.ReadAll(static.DefaultGalleryImage) + utils.ServeImage(w, r, image) + return + } + + rs.imageRoutes.serveThumbnail(w, r, i) +} + +func (rs galleryRoutes) Preview(w http.ResponseWriter, r *http.Request) { + g := r.Context().Value(galleryKey).(*models.Gallery) + indexQueryParam := chi.URLParam(r, "imageIndex") + var i *models.Image + + index, err := strconv.Atoi(indexQueryParam) + if err != nil || index < 0 { + http.Error(w, "bad index", 400) + return + } + + _ = rs.withReadTxn(r, func(ctx context.Context) error { + qb := rs.imageFinder + i, _ = qb.FindByGalleryIDIndex(ctx, g.ID, uint(index)) + if i == nil { + return nil + } + // TODO - handle errors? 
+ + // serveThumbnail needs files populated + if err := i.LoadPrimaryFile(ctx, rs.fileGetter); err != nil { + if !errors.Is(err, context.Canceled) { + logger.Errorf("error loading primary file for image %d: %v", i.ID, err) + } + // set image to nil so that it doesn't try to use the primary file + i = nil + } + + return nil + }) + if i == nil { + http.Error(w, http.StatusText(404), 404) + return + } + + rs.imageRoutes.serveThumbnail(w, r, i) +} + +func (rs galleryRoutes) GalleryCtx(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + galleryIdentifierQueryParam := chi.URLParam(r, "galleryId") + galleryID, _ := strconv.Atoi(galleryIdentifierQueryParam) + + var gallery *models.Gallery + _ = rs.withReadTxn(r, func(ctx context.Context) error { + qb := rs.galleryFinder + if galleryID == 0 { + galleries, _ := qb.FindByChecksum(ctx, galleryIdentifierQueryParam) + if len(galleries) > 0 { + gallery = galleries[0] + } + } else { + gallery, _ = qb.Find(ctx, galleryID) + } + + if gallery != nil { + if err := gallery.LoadPrimaryFile(ctx, rs.fileGetter); err != nil { + if !errors.Is(err, context.Canceled) { + logger.Errorf("error loading primary file for gallery %d: %v", galleryID, err) + } + // set image to nil so that it doesn't try to use the primary file + gallery = nil + } + } + + return nil + }) + if gallery == nil { + http.Error(w, http.StatusText(404), 404) + return + } + + ctx := context.WithValue(r.Context(), galleryKey, gallery) + next.ServeHTTP(w, r.WithContext(ctx)) + }) +} diff --git a/internal/api/routes_movie.go b/internal/api/routes_group.go similarity index 56% rename from internal/api/routes_movie.go rename to internal/api/routes_group.go index cd422468172..20eaa4c23e5 100644 --- a/internal/api/routes_movie.go +++ b/internal/api/routes_group.go @@ -14,22 +14,22 @@ import ( "github.com/stashapp/stash/pkg/utils" ) -type MovieFinder interface { - models.MovieGetter - GetFrontImage(ctx context.Context, movieID 
int) ([]byte, error) - GetBackImage(ctx context.Context, movieID int) ([]byte, error) +type GroupFinder interface { + models.GroupGetter + GetFrontImage(ctx context.Context, groupID int) ([]byte, error) + GetBackImage(ctx context.Context, groupID int) ([]byte, error) } -type movieRoutes struct { +type groupRoutes struct { routes - movieFinder MovieFinder + groupFinder GroupFinder } -func (rs movieRoutes) Routes() chi.Router { +func (rs groupRoutes) Routes() chi.Router { r := chi.NewRouter() - r.Route("/{movieId}", func(r chi.Router) { - r.Use(rs.MovieCtx) + r.Route("/{groupId}", func(r chi.Router) { + r.Use(rs.GroupCtx) r.Get("/frontimage", rs.FrontImage) r.Get("/backimage", rs.BackImage) }) @@ -37,77 +37,77 @@ func (rs movieRoutes) Routes() chi.Router { return r } -func (rs movieRoutes) FrontImage(w http.ResponseWriter, r *http.Request) { - movie := r.Context().Value(movieKey).(*models.Movie) +func (rs groupRoutes) FrontImage(w http.ResponseWriter, r *http.Request) { + group := r.Context().Value(groupKey).(*models.Group) defaultParam := r.URL.Query().Get("default") var image []byte if defaultParam != "true" { readTxnErr := rs.withReadTxn(r, func(ctx context.Context) error { var err error - image, err = rs.movieFinder.GetFrontImage(ctx, movie.ID) + image, err = rs.groupFinder.GetFrontImage(ctx, group.ID) return err }) if errors.Is(readTxnErr, context.Canceled) { return } if readTxnErr != nil { - logger.Warnf("read transaction error on fetch movie front image: %v", readTxnErr) + logger.Warnf("read transaction error on fetch group front image: %v", readTxnErr) } } // fallback to default image if len(image) == 0 { - image = static.ReadAll(static.DefaultMovieImage) + image = static.ReadAll(static.DefaultGroupImage) } utils.ServeImage(w, r, image) } -func (rs movieRoutes) BackImage(w http.ResponseWriter, r *http.Request) { - movie := r.Context().Value(movieKey).(*models.Movie) +func (rs groupRoutes) BackImage(w http.ResponseWriter, r *http.Request) { + group := 
r.Context().Value(groupKey).(*models.Group) defaultParam := r.URL.Query().Get("default") var image []byte if defaultParam != "true" { readTxnErr := rs.withReadTxn(r, func(ctx context.Context) error { var err error - image, err = rs.movieFinder.GetBackImage(ctx, movie.ID) + image, err = rs.groupFinder.GetBackImage(ctx, group.ID) return err }) if errors.Is(readTxnErr, context.Canceled) { return } if readTxnErr != nil { - logger.Warnf("read transaction error on fetch movie back image: %v", readTxnErr) + logger.Warnf("read transaction error on fetch group back image: %v", readTxnErr) } } // fallback to default image if len(image) == 0 { - image = static.ReadAll(static.DefaultMovieImage) + image = static.ReadAll(static.DefaultGroupImage) } utils.ServeImage(w, r, image) } -func (rs movieRoutes) MovieCtx(next http.Handler) http.Handler { +func (rs groupRoutes) GroupCtx(next http.Handler) http.Handler { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - movieID, err := strconv.Atoi(chi.URLParam(r, "movieId")) + groupID, err := strconv.Atoi(chi.URLParam(r, "groupId")) if err != nil { http.Error(w, http.StatusText(404), 404) return } - var movie *models.Movie + var group *models.Group _ = rs.withReadTxn(r, func(ctx context.Context) error { - movie, _ = rs.movieFinder.Find(ctx, movieID) + group, _ = rs.groupFinder.Find(ctx, groupID) return nil }) - if movie == nil { + if group == nil { http.Error(w, http.StatusText(404), 404) return } - ctx := context.WithValue(r.Context(), movieKey, movie) + ctx := context.WithValue(r.Context(), groupKey, group) next.ServeHTTP(w, r.WithContext(ctx)) }) } diff --git a/internal/api/routes_image.go b/internal/api/routes_image.go index 270b4de7fe2..89e6d2db4d0 100644 --- a/internal/api/routes_image.go +++ b/internal/api/routes_image.go @@ -46,8 +46,12 @@ func (rs imageRoutes) Routes() chi.Router { } func (rs imageRoutes) Thumbnail(w http.ResponseWriter, r *http.Request) { - mgr := manager.GetInstance() img := 
r.Context().Value(imageKey).(*models.Image) + rs.serveThumbnail(w, r, img) +} + +func (rs imageRoutes) serveThumbnail(w http.ResponseWriter, r *http.Request, img *models.Image) { + mgr := manager.GetInstance() filepath := mgr.Paths.Generated.GetThumbnailPath(img.Checksum, models.DefaultGthumbWidth) // if the thumbnail doesn't exist, encode on the fly diff --git a/internal/api/server.go b/internal/api/server.go index b9fd1eeed8c..63a81da7c2e 100644 --- a/internal/api/server.go +++ b/internal/api/server.go @@ -75,7 +75,8 @@ func (dir osFS) Open(name string) (fs.File, error) { return os.DirFS(string(dir)).Open(name) } -// Called at startup +// Initialize creates a new [Server] instance. +// It assumes that the [manager.Manager] instance has been initialised. func Initialize() (*Server, error) { mgr := manager.GetInstance() cfg := mgr.Config @@ -157,11 +158,13 @@ func Initialize() (*Server, error) { sceneService := mgr.SceneService imageService := mgr.ImageService galleryService := mgr.GalleryService + groupService := mgr.GroupService resolver := &Resolver{ repository: repo, sceneService: sceneService, imageService: imageService, galleryService: galleryService, + groupService: groupService, hookExecutor: pluginCache, } @@ -207,9 +210,10 @@ func Initialize() (*Server, error) { r.Mount("/performer", server.getPerformerRoutes()) r.Mount("/scene", server.getSceneRoutes()) + r.Mount("/gallery", server.getGalleryRoutes()) r.Mount("/image", server.getImageRoutes()) r.Mount("/studio", server.getStudioRoutes()) - r.Mount("/movie", server.getMovieRoutes()) + r.Mount("/group", server.getGroupRoutes()) r.Mount("/tag", server.getTagRoutes()) r.Mount("/downloads", server.getDownloadsRoutes()) r.Mount("/plugin", server.getPluginRoutes()) @@ -288,6 +292,9 @@ func Initialize() (*Server, error) { return server, nil } +// Start starts the server. It listens on the configured address and port. +// It calls ListenAndServeTLS if TLS is configured, otherwise it calls ListenAndServe. 
+// Calls to Start are blocked until the server is shutdown. func (s *Server) Start() error { logger.Infof("stash is listening on " + s.Addr) logger.Infof("stash is running at " + s.displayAddress) @@ -299,6 +306,7 @@ func (s *Server) Start() error { } } +// Shutdown gracefully shuts down the server without interrupting any active connections. func (s *Server) Shutdown() { err := s.Server.Shutdown(context.TODO()) if err != nil { @@ -326,6 +334,16 @@ func (s *Server) getSceneRoutes() chi.Router { }.Routes() } +func (s *Server) getGalleryRoutes() chi.Router { + repo := s.manager.Repository + return galleryRoutes{ + routes: routes{txnManager: repo.TxnManager}, + imageFinder: repo.Image, + galleryFinder: repo.Gallery, + fileGetter: repo.File, + }.Routes() +} + func (s *Server) getImageRoutes() chi.Router { repo := s.manager.Repository return imageRoutes{ @@ -343,11 +361,11 @@ func (s *Server) getStudioRoutes() chi.Router { }.Routes() } -func (s *Server) getMovieRoutes() chi.Router { +func (s *Server) getGroupRoutes() chi.Router { repo := s.manager.Repository - return movieRoutes{ + return groupRoutes{ routes: routes{txnManager: repo.TxnManager}, - movieFinder: repo.Movie, + groupFinder: repo.Group, }.Routes() } diff --git a/internal/api/stash_box.go b/internal/api/stash_box.go new file mode 100644 index 00000000000..6aa5e6ddcda --- /dev/null +++ b/internal/api/stash_box.go @@ -0,0 +1,45 @@ +package api + +import ( + "fmt" + "strings" + + "github.com/stashapp/stash/internal/manager/config" + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/scraper/stashbox" +) + +func (r *Resolver) newStashBoxClient(box models.StashBox) *stashbox.Client { + return stashbox.NewClient(box, r.stashboxRepository()) +} + +func resolveStashBoxFn(indexField, endpointField string) func(index *int, endpoint *string) (*models.StashBox, error) { + return func(index *int, endpoint *string) (*models.StashBox, error) { + boxes := config.GetInstance().GetStashBoxes() + + // 
prefer endpoint over index + if endpoint != nil { + for _, box := range boxes { + if strings.EqualFold(*endpoint, box.Endpoint) { + return box, nil + } + } + return nil, fmt.Errorf("stash box not found") + } + + if index != nil { + if *index < 0 || *index >= len(boxes) { + return nil, fmt.Errorf("invalid %s %d", indexField, *index) + } + + return boxes[*index], nil + } + + return nil, fmt.Errorf("%s not provided", endpointField) + } +} + +var ( + resolveStashBox = resolveStashBoxFn("stash_box_index", "stash_box_endpoint") + resolveStashBoxBatchTagInput = resolveStashBoxFn("endpoint", "stash_box_endpoint") +) diff --git a/internal/api/urlbuilders/doc.go b/internal/api/urlbuilders/doc.go new file mode 100644 index 00000000000..636ec50f19b --- /dev/null +++ b/internal/api/urlbuilders/doc.go @@ -0,0 +1,2 @@ +// Package urlbuilders provides the builders used to build URLs to pass to clients. +package urlbuilders diff --git a/internal/api/urlbuilders/gallery.go b/internal/api/urlbuilders/gallery.go new file mode 100644 index 00000000000..3e6c5ef08af --- /dev/null +++ b/internal/api/urlbuilders/gallery.go @@ -0,0 +1,27 @@ +package urlbuilders + +import ( + "strconv" + + "github.com/stashapp/stash/pkg/models" +) + +type GalleryURLBuilder struct { + BaseURL string + GalleryID string +} + +func NewGalleryURLBuilder(baseURL string, gallery *models.Gallery) GalleryURLBuilder { + return GalleryURLBuilder{ + BaseURL: baseURL, + GalleryID: strconv.Itoa(gallery.ID), + } +} + +func (b GalleryURLBuilder) GetPreviewURL() string { + return b.BaseURL + "/gallery/" + b.GalleryID + "/preview" +} + +func (b GalleryURLBuilder) GetCoverURL() string { + return b.BaseURL + "/gallery/" + b.GalleryID + "/cover" +} diff --git a/internal/api/urlbuilders/group.go b/internal/api/urlbuilders/group.go new file mode 100644 index 00000000000..26abd8dbdc6 --- /dev/null +++ b/internal/api/urlbuilders/group.go @@ -0,0 +1,33 @@ +package urlbuilders + +import ( + "strconv" + + 
"github.com/stashapp/stash/pkg/models" +) + +type GroupURLBuilder struct { + BaseURL string + GroupID string + UpdatedAt string +} + +func NewGroupURLBuilder(baseURL string, group *models.Group) GroupURLBuilder { + return GroupURLBuilder{ + BaseURL: baseURL, + GroupID: strconv.Itoa(group.ID), + UpdatedAt: strconv.FormatInt(group.UpdatedAt.Unix(), 10), + } +} + +func (b GroupURLBuilder) GetGroupFrontImageURL(hasImage bool) string { + url := b.BaseURL + "/group/" + b.GroupID + "/frontimage?t=" + b.UpdatedAt + if !hasImage { + url += "&default=true" + } + return url +} + +func (b GroupURLBuilder) GetGroupBackImageURL() string { + return b.BaseURL + "/group/" + b.GroupID + "/backimage?t=" + b.UpdatedAt +} diff --git a/internal/api/urlbuilders/movie.go b/internal/api/urlbuilders/movie.go deleted file mode 100644 index a9ca6831078..00000000000 --- a/internal/api/urlbuilders/movie.go +++ /dev/null @@ -1,32 +0,0 @@ -package urlbuilders - -import ( - "github.com/stashapp/stash/pkg/models" - "strconv" -) - -type MovieURLBuilder struct { - BaseURL string - MovieID string - UpdatedAt string -} - -func NewMovieURLBuilder(baseURL string, movie *models.Movie) MovieURLBuilder { - return MovieURLBuilder{ - BaseURL: baseURL, - MovieID: strconv.Itoa(movie.ID), - UpdatedAt: strconv.FormatInt(movie.UpdatedAt.Unix(), 10), - } -} - -func (b MovieURLBuilder) GetMovieFrontImageURL(hasImage bool) string { - url := b.BaseURL + "/movie/" + b.MovieID + "/frontimage?t=" + b.UpdatedAt - if !hasImage { - url += "&default=true" - } - return url -} - -func (b MovieURLBuilder) GetMovieBackImageURL() string { - return b.BaseURL + "/movie/" + b.MovieID + "/backimage?t=" + b.UpdatedAt -} diff --git a/internal/autotag/doc.go b/internal/autotag/doc.go new file mode 100644 index 00000000000..a495949e8b6 --- /dev/null +++ b/internal/autotag/doc.go @@ -0,0 +1,9 @@ +// Package autotag provides the autotagging functionality for the application. 
+// +// The autotag functionality sets media metadata based on the media's path. +// The functions in this package are in the form of {ObjectType}{TagTypes}, +// where the ObjectType is the single object instance to run on, and TagTypes +// are the related types. +// For example, PerformerScenes finds and tags scenes with a provided performer, +// whereas ScenePerformers tags a single scene with any Performers that match. +package autotag diff --git a/internal/build/version.go b/internal/build/version.go index 84c5f819f4f..ecccd970372 100644 --- a/internal/build/version.go +++ b/internal/build/version.go @@ -1,3 +1,4 @@ +// Package build provides the version information for the application. package build import ( diff --git a/internal/desktop/desktop.go b/internal/desktop/desktop.go index b5a261bf7a9..a89a3c96256 100644 --- a/internal/desktop/desktop.go +++ b/internal/desktop/desktop.go @@ -1,3 +1,4 @@ +// Package desktop provides desktop integration functionality for the application. package desktop import ( diff --git a/internal/dlna/cds.go b/internal/dlna/cds.go index 6e1604bdca3..a38e0e55bed 100644 --- a/internal/dlna/cds.go +++ b/internal/dlna/cds.go @@ -192,7 +192,7 @@ func (me *contentDirectoryService) Handle(action string, argsXML []byte, r *http obj, err := me.objectFromID(browse.ObjectID) if err != nil { - return nil, upnp.Errorf(upnpav.NoSuchObjectErrorCode, err.Error()) + return nil, upnp.Errorf(upnpav.NoSuchObjectErrorCode, "cannot find object with id %q: %v", browse.ObjectID, err.Error()) } switch browse.BrowseFlag { @@ -316,13 +316,13 @@ func (me *contentDirectoryService) handleBrowseDirectChildren(obj object, host s objs = me.getPerformerScenes(childPath(paths), host) } - // Movies - if obj.Path == "movies" { - objs = me.getMovies() + // Groups - deprecated + if obj.Path == "groups" { + objs = me.getGroups() } - if strings.HasPrefix(obj.Path, "movies/") { - objs = me.getMovieScenes(childPath(paths), host) + if strings.HasPrefix(obj.Path, "groups/") 
{ + objs = me.getGroupScenes(childPath(paths), host) } // Rating @@ -433,7 +433,7 @@ func getRootObjects() []interface{} { objs = append(objs, makeStorageFolder("performers", "performers", rootID)) objs = append(objs, makeStorageFolder("tags", "tags", rootID)) objs = append(objs, makeStorageFolder("studios", "studios", rootID)) - objs = append(objs, makeStorageFolder("movies", "movies", rootID)) + objs = append(objs, makeStorageFolder("groups", "groups", rootID)) objs = append(objs, makeStorageFolder("rating", "rating", rootID)) return objs @@ -658,18 +658,18 @@ func (me *contentDirectoryService) getPerformerScenes(paths []string, host strin return me.getVideos(sceneFilter, parentID, host) } -func (me *contentDirectoryService) getMovies() []interface{} { +func (me *contentDirectoryService) getGroups() []interface{} { var objs []interface{} r := me.repository if err := r.WithReadTxn(context.TODO(), func(ctx context.Context) error { - movies, err := r.MovieFinder.All(ctx) + groups, err := r.GroupFinder.All(ctx) if err != nil { return err } - for _, s := range movies { - objs = append(objs, makeStorageFolder("movies/"+strconv.Itoa(s.ID), s.Name, "movies")) + for _, s := range groups { + objs = append(objs, makeStorageFolder("groups/"+strconv.Itoa(s.ID), s.Name, "groups")) } return nil @@ -680,15 +680,15 @@ func (me *contentDirectoryService) getMovies() []interface{} { return objs } -func (me *contentDirectoryService) getMovieScenes(paths []string, host string) []interface{} { +func (me *contentDirectoryService) getGroupScenes(paths []string, host string) []interface{} { sceneFilter := &models.SceneFilterType{ - Movies: &models.MultiCriterionInput{ + Groups: &models.HierarchicalMultiCriterionInput{ Modifier: models.CriterionModifierIncludes, Value: []string{paths[0]}, }, } - parentID := "movies/" + strings.Join(paths, "/") + parentID := "groups/" + strings.Join(paths, "/") page := getPageFromID(paths) if page != nil { diff --git a/internal/dlna/dms.go 
b/internal/dlna/dms.go index 0df483ccac9..3b27d607b2d 100644 --- a/internal/dlna/dms.go +++ b/internal/dlna/dms.go @@ -40,6 +40,7 @@ import ( "path" "strconv" "strings" + "sync" "time" "github.com/anacrolix/dms/soap" @@ -67,8 +68,8 @@ type PerformerFinder interface { All(ctx context.Context) ([]*models.Performer, error) } -type MovieFinder interface { - All(ctx context.Context) ([]*models.Movie, error) +type GroupFinder interface { + All(ctx context.Context) ([]*models.Group, error) } const ( @@ -229,6 +230,10 @@ func (me *Server) ssdpInterface(if_ net.Interface) { stopped := make(chan struct{}) go func() { defer close(stopped) + // FIXME - this currently blocks forever unless it encounters an error + // See https://github.com/anacrolix/dms/pull/150 + // Needs to be fixed upstream + //nolint:staticcheck if err := s.Serve(); err != nil { logger.Errorf("%q: %q\n", if_.Name, err) } @@ -274,6 +279,8 @@ type Server struct { sceneServer sceneServer ipWhitelistManager *ipWhitelistManager VideoSortOrder string + + subscribeLock sync.Mutex } // UPnP SOAP service. @@ -537,13 +544,14 @@ func (me *Server) contentDirectoryEventSubHandler(w http.ResponseWriter, r *http // The following code is a work in progress. It partially implements // the spec on eventing but hasn't been completed as I have nothing to // test it with. - service := me.services["ContentDirectory"] switch { case r.Method == "SUBSCRIBE" && r.Header.Get("SID") == "": urls := upnp.ParseCallbackURLs(r.Header.Get("CALLBACK")) var timeout int _, _ = fmt.Sscanf(r.Header.Get("TIMEOUT"), "Second-%d", &timeout) - sid, timeout, _ := service.Subscribe(urls, timeout) + + sid, timeout, _ := me.subscribe(urls, timeout) + w.Header()["SID"] = []string{sid} w.Header()["TIMEOUT"] = []string{fmt.Sprintf("Second-%d", timeout)} // TODO: Shouldn't have to do this to get headers logged. 
@@ -559,6 +567,16 @@ func (me *Server) contentDirectoryEventSubHandler(w http.ResponseWriter, r *http } } +// wrapper around service.Subscribe which requires concurrency protection +// TODO - this should be addressed upstream +func (me *Server) subscribe(urls []*url.URL, timeout int) (sid string, actualTimeout int, err error) { + me.subscribeLock.Lock() + defer me.subscribeLock.Unlock() + + service := me.services["ContentDirectory"] + return service.Subscribe(urls, timeout) +} + func (me *Server) initMux(mux *http.ServeMux) { mux.HandleFunc("/", func(resp http.ResponseWriter, req *http.Request) { resp.Header().Set("content-type", "text/html") diff --git a/internal/dlna/doc.go b/internal/dlna/doc.go new file mode 100644 index 00000000000..b5955c349f4 --- /dev/null +++ b/internal/dlna/doc.go @@ -0,0 +1,3 @@ +// Package dlna provides the DLNA functionality for the application. +// Much of this code is adapted from https://github.com/anacrolix/dms +package dlna diff --git a/internal/dlna/service.go b/internal/dlna/service.go index 07f41608441..6ef825bacae 100644 --- a/internal/dlna/service.go +++ b/internal/dlna/service.go @@ -22,7 +22,7 @@ type Repository struct { StudioFinder StudioFinder TagFinder TagFinder PerformerFinder PerformerFinder - MovieFinder MovieFinder + GroupFinder GroupFinder } func NewRepository(repo models.Repository) Repository { @@ -33,7 +33,7 @@ func NewRepository(repo models.Repository) Repository { StudioFinder: repo.Studio, TagFinder: repo.Tag, PerformerFinder: repo.Performer, - MovieFinder: repo.Movie, + GroupFinder: repo.Group, } } diff --git a/internal/identify/identify.go b/internal/identify/identify.go index 43e1dedf359..5eecd0d9927 100644 --- a/internal/identify/identify.go +++ b/internal/identify/identify.go @@ -1,3 +1,6 @@ +// Package identify provides the scene identification functionality for the application. 
+// The identify functionality uses scene scrapers to identify a given scene and +// set its metadata based on the scraped data. package identify import ( diff --git a/internal/identify/studio.go b/internal/identify/studio.go index d05967bc4f2..51bcaf2eec9 100644 --- a/internal/identify/studio.go +++ b/internal/identify/studio.go @@ -46,17 +46,17 @@ func createMissingStudio(ctx context.Context, endpoint string, w models.StudioRe return nil, err } - studioPartial := s.Parent.ToPartial(s.Parent.StoredID, endpoint, nil, existingStashIDs) + studioPartial := s.Parent.ToPartial(*s.Parent.StoredID, endpoint, nil, existingStashIDs) parentImage, err := s.Parent.GetImage(ctx, nil) if err != nil { return nil, err } - if err := studio.ValidateModify(ctx, *studioPartial, w); err != nil { + if err := studio.ValidateModify(ctx, studioPartial, w); err != nil { return nil, err } - _, err = w.UpdatePartial(ctx, *studioPartial) + _, err = w.UpdatePartial(ctx, studioPartial) if err != nil { return nil, err } diff --git a/internal/log/logger.go b/internal/log/logger.go index 50f5a42b472..5f686d32d5d 100644 --- a/internal/log/logger.go +++ b/internal/log/logger.go @@ -1,3 +1,4 @@ +// Package log provides an implementation of [logger.LoggerImpl], using logrus. 
package log import ( diff --git a/internal/manager/config/config.go b/internal/manager/config/config.go index 184d78494eb..d56d3359bab 100644 --- a/internal/manager/config/config.go +++ b/internal/manager/config/config.go @@ -232,6 +232,9 @@ const ( SecurityTripwireAccessedFromPublicInternet = "security_tripwire_accessed_from_public_internet" securityTripwireAccessedFromPublicInternetDefault = "" + sslCertPath = "ssl_cert_path" + sslKeyPath = "ssl_key_path" + // DLNA options DLNAServerName = "dlna.server_name" DLNADefaultEnabled = "dlna.default_enabled" @@ -356,8 +359,17 @@ func (i *Config) InitTLS() { paths.GetStashHomeDirectory(), } - i.certFile = fsutil.FindInPaths(tlsPaths, "stash.crt") - i.keyFile = fsutil.FindInPaths(tlsPaths, "stash.key") + i.certFile = i.getString(sslCertPath) + if i.certFile == "" { + // Look for default file + i.certFile = fsutil.FindInPaths(tlsPaths, "stash.crt") + } + + i.keyFile = i.getString(sslKeyPath) + if i.keyFile == "" { + // Look for default file + i.keyFile = fsutil.FindInPaths(tlsPaths, "stash.key") + } } func (i *Config) GetTLSFiles() (certFile, keyFile string) { diff --git a/internal/manager/init.go b/internal/manager/init.go index 347d08a153e..dd1640ed368 100644 --- a/internal/manager/init.go +++ b/internal/manager/init.go @@ -17,6 +17,7 @@ import ( "github.com/stashapp/stash/pkg/ffmpeg" "github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/gallery" + "github.com/stashapp/stash/pkg/group" "github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/job" "github.com/stashapp/stash/pkg/logger" @@ -67,6 +68,10 @@ func Initialize(cfg *config.Config, l *log.Logger) (*Manager, error) { Folder: db.Folder, } + groupService := &group.Service{ + Repository: db.Group, + } + sceneServer := &SceneServer{ TxnManager: repo.TxnManager, SceneCoverGetter: repo.Scene, @@ -99,6 +104,7 @@ func Initialize(cfg *config.Config, l *log.Logger) (*Manager, error) { SceneService: sceneService, ImageService: imageService, 
GalleryService: galleryService, + GroupService: groupService, scanSubs: &subscriptionManager{}, } @@ -305,7 +311,7 @@ func (s *Manager) RefreshFFMpeg(ctx context.Context) { logger.Debugf("using ffprobe: %s", ffprobePath) s.FFMpeg = ffmpeg.NewEncoder(ffmpegPath) - s.FFProbe = ffmpeg.FFProbe(ffprobePath) + s.FFProbe = ffmpeg.NewFFProbe(ffprobePath) s.FFMpeg.InitHWSupport(ctx) } diff --git a/internal/manager/json_utils.go b/internal/manager/json_utils.go index c90c9502942..f1ce60404f1 100644 --- a/internal/manager/json_utils.go +++ b/internal/manager/json_utils.go @@ -23,8 +23,8 @@ func (jp *jsonUtils) saveTag(fn string, tag *jsonschema.Tag) error { return jsonschema.SaveTagFile(filepath.Join(jp.json.Tags, fn), tag) } -func (jp *jsonUtils) saveMovie(fn string, movie *jsonschema.Movie) error { - return jsonschema.SaveMovieFile(filepath.Join(jp.json.Movies, fn), movie) +func (jp *jsonUtils) saveGroup(fn string, group *jsonschema.Group) error { + return jsonschema.SaveGroupFile(filepath.Join(jp.json.Groups, fn), group) } func (jp *jsonUtils) saveScene(fn string, scene *jsonschema.Scene) error { diff --git a/internal/manager/manager.go b/internal/manager/manager.go index 7032c3329fc..4827a3e3d92 100644 --- a/internal/manager/manager.go +++ b/internal/manager/manager.go @@ -1,3 +1,5 @@ +// Package manager provides the core manager of the application. +// This consolidates all the services and managers into a single struct. 
package manager import ( @@ -41,7 +43,7 @@ type Manager struct { Paths *paths.Paths FFMpeg *ffmpeg.FFMpeg - FFProbe ffmpeg.FFProbe + FFProbe *ffmpeg.FFProbe StreamManager *ffmpeg.StreamManager JobManager *job.Manager @@ -64,6 +66,7 @@ type Manager struct { SceneService SceneService ImageService ImageService GalleryService GalleryService + GroupService GroupService scanSubs *subscriptionManager } @@ -297,7 +300,7 @@ func (s *Manager) Setup(ctx context.Context, input SetupInput) error { } func (s *Manager) validateFFmpeg() error { - if s.FFMpeg == nil || s.FFProbe == "" { + if s.FFMpeg == nil || s.FFProbe == nil { return errors.New("missing ffmpeg and/or ffprobe") } return nil @@ -397,7 +400,7 @@ func (s *Manager) GetSystemStatus() *SystemStatus { } ffprobePath := "" - if s.FFProbe != "" { + if s.FFProbe != nil { ffprobePath = s.FFProbe.Path() } diff --git a/internal/manager/manager_tasks.go b/internal/manager/manager_tasks.go index dd2b9dcc29a..b85a4c2cf75 100644 --- a/internal/manager/manager_tasks.go +++ b/internal/manager/manager_tasks.go @@ -366,8 +366,9 @@ func (s *Manager) MigrateHash(ctx context.Context) int { // If neither ids nor names are set, tag all items type StashBoxBatchTagInput struct { - // Stash endpoint to use for the tagging - Endpoint int `json:"endpoint"` + // Stash endpoint to use for the tagging - deprecated - use StashBoxEndpoint + Endpoint *int `json:"endpoint"` + StashBoxEndpoint *string `json:"stash_box_endpoint"` // Fields to exclude when executing the tagging ExcludeFields []string `json:"exclude_fields"` // Refresh items already tagged by StashBox if true. 
Only tag items with no StashBox tagging if false @@ -388,16 +389,10 @@ type StashBoxBatchTagInput struct { PerformerNames []string `json:"performer_names"` } -func (s *Manager) StashBoxBatchPerformerTag(ctx context.Context, input StashBoxBatchTagInput) int { +func (s *Manager) StashBoxBatchPerformerTag(ctx context.Context, box *models.StashBox, input StashBoxBatchTagInput) int { j := job.MakeJobExec(func(ctx context.Context, progress *job.Progress) error { logger.Infof("Initiating stash-box batch performer tag") - boxes := config.GetInstance().GetStashBoxes() - if input.Endpoint < 0 || input.Endpoint >= len(boxes) { - return fmt.Errorf("invalid stash_box_index %d", input.Endpoint) - } - box := boxes[input.Endpoint] - var tasks []StashBoxBatchTagTask // The gocritic linter wants to turn this ifElseChain into a switch. @@ -526,16 +521,10 @@ func (s *Manager) StashBoxBatchPerformerTag(ctx context.Context, input StashBoxB return s.JobManager.Add(ctx, "Batch stash-box performer tag...", j) } -func (s *Manager) StashBoxBatchStudioTag(ctx context.Context, input StashBoxBatchTagInput) int { +func (s *Manager) StashBoxBatchStudioTag(ctx context.Context, box *models.StashBox, input StashBoxBatchTagInput) int { j := job.MakeJobExec(func(ctx context.Context, progress *job.Progress) error { logger.Infof("Initiating stash-box batch studio tag") - boxes := config.GetInstance().GetStashBoxes() - if input.Endpoint < 0 || input.Endpoint >= len(boxes) { - return fmt.Errorf("invalid stash_box_index %d", input.Endpoint) - } - box := boxes[input.Endpoint] - var tasks []StashBoxBatchTagTask // The gocritic linter wants to turn this ifElseChain into a switch. 
diff --git a/internal/manager/repository.go b/internal/manager/repository.go index adfbfcb63d3..13e1e8ae81b 100644 --- a/internal/manager/repository.go +++ b/internal/manager/repository.go @@ -3,6 +3,7 @@ package manager import ( "context" + "github.com/stashapp/stash/pkg/group" "github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/scene" @@ -24,9 +25,21 @@ type GalleryService interface { AddImages(ctx context.Context, g *models.Gallery, toAdd ...int) error RemoveImages(ctx context.Context, g *models.Gallery, toRemove ...int) error + SetCover(ctx context.Context, g *models.Gallery, coverImageId int) error + ResetCover(ctx context.Context, g *models.Gallery) error + Destroy(ctx context.Context, i *models.Gallery, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile bool) ([]*models.Image, error) ValidateImageGalleryChange(ctx context.Context, i *models.Image, updateIDs models.UpdateIDs) error Updated(ctx context.Context, galleryID int) error } + +type GroupService interface { + Create(ctx context.Context, group *models.Group, frontimageData []byte, backimageData []byte) error + UpdatePartial(ctx context.Context, id int, updatedGroup models.GroupPartial, frontImage group.ImageInput, backImage group.ImageInput) (*models.Group, error) + + AddSubGroups(ctx context.Context, groupID int, subGroups []models.GroupIDDescription, insertIndex *int) error + RemoveSubGroups(ctx context.Context, groupID int, subGroupIDs []int) error + ReorderSubGroups(ctx context.Context, groupID int, subGroupIDs []int, insertPointID int, insertAfter bool) error +} diff --git a/internal/manager/task/migrate.go b/internal/manager/task/migrate.go index 48ba15a26a9..37062329e48 100644 --- a/internal/manager/task/migrate.go +++ b/internal/manager/task/migrate.go @@ -23,19 +23,27 @@ type MigrateJob struct { Database *sqlite.Database } +type databaseSchemaInfo struct { + CurrentSchemaVersion uint + RequiredSchemaVersion uint + StepsRequired 
uint +} + func (s *MigrateJob) Execute(ctx context.Context, progress *job.Progress) error { - required, err := s.required() + schemaInfo, err := s.required() if err != nil { return err } - if required == 0 { + if schemaInfo.StepsRequired == 0 { logger.Infof("database is already at the latest schema version") return nil } + logger.Infof("Migrating database from %d to %d", schemaInfo.CurrentSchemaVersion, schemaInfo.RequiredSchemaVersion) + // set the number of tasks = required steps + optimise - progress.SetTotal(int(required + 1)) + progress.SetTotal(int(schemaInfo.StepsRequired + 1)) database := s.Database @@ -79,28 +87,31 @@ func (s *MigrateJob) Execute(ctx context.Context, progress *job.Progress) error } } + logger.Infof("Database migration complete") + return nil } -func (s *MigrateJob) required() (uint, error) { +func (s *MigrateJob) required() (ret databaseSchemaInfo, err error) { database := s.Database m, err := sqlite.NewMigrator(database) if err != nil { - return 0, err + return } defer m.Close() - currentSchemaVersion := m.CurrentSchemaVersion() - targetSchemaVersion := m.RequiredSchemaVersion() + ret.CurrentSchemaVersion = m.CurrentSchemaVersion() + ret.RequiredSchemaVersion = m.RequiredSchemaVersion() - if targetSchemaVersion < currentSchemaVersion { + if ret.RequiredSchemaVersion < ret.CurrentSchemaVersion { // shouldn't happen - return 0, nil + return } - return targetSchemaVersion - currentSchemaVersion, nil + ret.StepsRequired = ret.RequiredSchemaVersion - ret.CurrentSchemaVersion + return } func (s *MigrateJob) runMigrations(ctx context.Context, progress *job.Progress) error { diff --git a/internal/manager/task_export.go b/internal/manager/task_export.go index 155f9feced8..19abba2158d 100644 --- a/internal/manager/task_export.go +++ b/internal/manager/task_export.go @@ -15,13 +15,13 @@ import ( "github.com/stashapp/stash/internal/manager/config" "github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/gallery" + 
"github.com/stashapp/stash/pkg/group" "github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/json" "github.com/stashapp/stash/pkg/models/jsonschema" "github.com/stashapp/stash/pkg/models/paths" - "github.com/stashapp/stash/pkg/movie" "github.com/stashapp/stash/pkg/performer" "github.com/stashapp/stash/pkg/scene" "github.com/stashapp/stash/pkg/sliceutil" @@ -42,7 +42,7 @@ type ExportTask struct { scenes *exportSpec images *exportSpec performers *exportSpec - movies *exportSpec + groups *exportSpec tags *exportSpec studios *exportSpec galleries *exportSpec @@ -63,7 +63,8 @@ type ExportObjectsInput struct { Studios *ExportObjectTypeInput `json:"studios"` Performers *ExportObjectTypeInput `json:"performers"` Tags *ExportObjectTypeInput `json:"tags"` - Movies *ExportObjectTypeInput `json:"movies"` + Groups *ExportObjectTypeInput `json:"groups"` + Movies *ExportObjectTypeInput `json:"movies"` // deprecated Galleries *ExportObjectTypeInput `json:"galleries"` IncludeDependencies *bool `json:"includeDependencies"` } @@ -97,13 +98,19 @@ func CreateExportTask(a models.HashAlgorithm, input ExportObjectsInput) *ExportT includeDeps = *input.IncludeDependencies } + // handle deprecated Movies field + groupSpec := input.Groups + if groupSpec == nil && input.Movies != nil { + groupSpec = input.Movies + } + return &ExportTask{ repository: GetInstance().Repository, fileNamingAlgorithm: a, scenes: newExportSpec(input.Scenes), images: newExportSpec(input.Images), performers: newExportSpec(input.Performers), - movies: newExportSpec(input.Movies), + groups: newExportSpec(groupSpec), tags: newExportSpec(input.Tags), studios: newExportSpec(input.Studios), galleries: newExportSpec(input.Galleries), @@ -113,7 +120,7 @@ func CreateExportTask(a models.HashAlgorithm, input ExportObjectsInput) *ExportT func (t *ExportTask) Start(ctx context.Context, wg *sync.WaitGroup) { defer wg.Done() - // 
@manager.total = Scene.count + Gallery.count + Performer.count + Studio.count + Movie.count + // @manager.total = Scene.count + Gallery.count + Performer.count + Studio.count + Group.count workerCount := runtime.GOMAXPROCS(0) // set worker count to number of cpus available startTime := time.Now() @@ -149,11 +156,11 @@ func (t *ExportTask) Start(ctx context.Context, wg *sync.WaitGroup) { paths.EnsureJSONDirs(t.baseDir) txnErr := t.repository.WithTxn(ctx, func(ctx context.Context) error { - // include movie scenes and gallery images + // include group scenes and gallery images if !t.full { - // only include movie scenes if includeDependencies is also set + // only include group scenes if includeDependencies is also set if !t.scenes.all && t.includeDependencies { - t.populateMovieScenes(ctx) + t.populateGroupScenes(ctx) } // always export gallery images @@ -165,7 +172,7 @@ func (t *ExportTask) Start(ctx context.Context, wg *sync.WaitGroup) { t.ExportScenes(ctx, workerCount) t.ExportImages(ctx, workerCount) t.ExportGalleries(ctx, workerCount) - t.ExportMovies(ctx, workerCount) + t.ExportGroups(ctx, workerCount) t.ExportPerformers(ctx, workerCount) t.ExportStudios(ctx, workerCount) t.ExportTags(ctx, workerCount) @@ -222,7 +229,7 @@ func (t *ExportTask) zipFiles(w io.Writer) error { walkWarn(t.json.json.Galleries, t.zipWalkFunc(u.json.Galleries, z)) walkWarn(t.json.json.Performers, t.zipWalkFunc(u.json.Performers, z)) walkWarn(t.json.json.Studios, t.zipWalkFunc(u.json.Studios, z)) - walkWarn(t.json.json.Movies, t.zipWalkFunc(u.json.Movies, z)) + walkWarn(t.json.json.Groups, t.zipWalkFunc(u.json.Groups, z)) walkWarn(t.json.json.Scenes, t.zipWalkFunc(u.json.Scenes, z)) walkWarn(t.json.json.Images, t.zipWalkFunc(u.json.Images, z)) @@ -275,28 +282,28 @@ func (t *ExportTask) zipFile(fn, outDir string, z *zip.Writer) error { return nil } -func (t *ExportTask) populateMovieScenes(ctx context.Context) { +func (t *ExportTask) populateGroupScenes(ctx context.Context) { r := 
t.repository - reader := r.Movie + reader := r.Group sceneReader := r.Scene - var movies []*models.Movie + var groups []*models.Group var err error - all := t.full || (t.movies != nil && t.movies.all) + all := t.full || (t.groups != nil && t.groups.all) if all { - movies, err = reader.All(ctx) - } else if t.movies != nil && len(t.movies.IDs) > 0 { - movies, err = reader.FindMany(ctx, t.movies.IDs) + groups, err = reader.All(ctx) + } else if t.groups != nil && len(t.groups.IDs) > 0 { + groups, err = reader.FindMany(ctx, t.groups.IDs) } if err != nil { - logger.Errorf("[movies] failed to fetch movies: %v", err) + logger.Errorf("[groups] failed to fetch groups: %v", err) } - for _, m := range movies { - scenes, err := sceneReader.FindByMovieID(ctx, m.ID) + for _, m := range groups { + scenes, err := sceneReader.FindByGroupID(ctx, m.ID) if err != nil { - logger.Errorf("[movies] <%s> failed to fetch scenes for movie: %v", m.Name, err) + logger.Errorf("[groups] <%s> failed to fetch scenes for group: %v", m.Name, err) continue } @@ -481,7 +488,7 @@ func (t *ExportTask) exportScene(ctx context.Context, wg *sync.WaitGroup, jobCha r := t.repository sceneReader := r.Scene studioReader := r.Studio - movieReader := r.Movie + groupReader := r.Group galleryReader := r.Gallery performerReader := r.Performer tagReader := r.Tag @@ -549,9 +556,9 @@ func (t *ExportTask) exportScene(ctx context.Context, wg *sync.WaitGroup, jobCha continue } - newSceneJSON.Movies, err = scene.GetSceneMoviesJSON(ctx, movieReader, s) + newSceneJSON.Groups, err = scene.GetSceneGroupsJSON(ctx, groupReader, s) if err != nil { - logger.Errorf("[scenes] <%s> error getting scene movies JSON: %v", sceneHash, err) + logger.Errorf("[scenes] <%s> error getting scene groups JSON: %v", sceneHash, err) continue } @@ -569,12 +576,12 @@ func (t *ExportTask) exportScene(ctx context.Context, wg *sync.WaitGroup, jobCha } t.tags.IDs = sliceutil.AppendUniques(t.tags.IDs, tagIDs) - movieIDs, err := 
scene.GetDependentMovieIDs(ctx, s) + groupIDs, err := scene.GetDependentGroupIDs(ctx, s) if err != nil { - logger.Errorf("[scenes] <%s> error getting scene movies: %v", sceneHash, err) + logger.Errorf("[scenes] <%s> error getting scene groups: %v", sceneHash, err) continue } - t.movies.IDs = sliceutil.AppendUniques(t.movies.IDs, movieIDs) + t.groups.IDs = sliceutil.AppendUniques(t.groups.IDs, groupIDs) t.performers.IDs = sliceutil.AppendUniques(t.performers.IDs, performer.GetIDs(performers)) } @@ -982,6 +989,7 @@ func (t *ExportTask) ExportStudios(ctx context.Context, workers int) { func (t *ExportTask) exportStudio(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models.Studio) { defer wg.Done() + r := t.repository studioReader := t.repository.Studio for s := range jobChan { @@ -992,6 +1000,18 @@ func (t *ExportTask) exportStudio(ctx context.Context, wg *sync.WaitGroup, jobCh continue } + tags, err := r.Tag.FindByStudioID(ctx, s.ID) + if err != nil { + logger.Errorf("[studios] <%s> error getting studio tags: %s", s.Name, err.Error()) + continue + } + + newStudioJSON.Tags = tag.GetNames(tags) + + if t.includeDependencies { + t.tags.IDs = sliceutil.AppendUniques(t.tags.IDs, tag.GetIDs(tags)) + } + fn := newStudioJSON.Filename() if err := t.json.saveStudio(fn, newStudioJSON); err != nil { @@ -1061,71 +1081,108 @@ func (t *ExportTask) exportTag(ctx context.Context, wg *sync.WaitGroup, jobChan } } -func (t *ExportTask) ExportMovies(ctx context.Context, workers int) { - var moviesWg sync.WaitGroup +func (t *ExportTask) ExportGroups(ctx context.Context, workers int) { + var groupsWg sync.WaitGroup - reader := t.repository.Movie - var movies []*models.Movie + reader := t.repository.Group + var groups []*models.Group var err error - all := t.full || (t.movies != nil && t.movies.all) + all := t.full || (t.groups != nil && t.groups.all) if all { - movies, err = reader.All(ctx) - } else if t.movies != nil && len(t.movies.IDs) > 0 { - movies, err = reader.FindMany(ctx, 
t.movies.IDs) + groups, err = reader.All(ctx) + } else if t.groups != nil && len(t.groups.IDs) > 0 { + groups, err = reader.FindMany(ctx, t.groups.IDs) } if err != nil { - logger.Errorf("[movies] failed to fetch movies: %v", err) + logger.Errorf("[groups] failed to fetch groups: %v", err) } - logger.Info("[movies] exporting") + logger.Info("[groups] exporting") startTime := time.Now() - jobCh := make(chan *models.Movie, workers*2) // make a buffered channel to feed workers + jobCh := make(chan *models.Group, workers*2) // make a buffered channel to feed workers for w := 0; w < workers; w++ { // create export Studio workers - moviesWg.Add(1) - go t.exportMovie(ctx, &moviesWg, jobCh) + groupsWg.Add(1) + go t.exportGroup(ctx, &groupsWg, jobCh) } - for i, movie := range movies { + for i, group := range groups { index := i + 1 - logger.Progressf("[movies] %d of %d", index, len(movies)) + logger.Progressf("[groups] %d of %d", index, len(groups)) - jobCh <- movie // feed workers + jobCh <- group // feed workers } close(jobCh) - moviesWg.Wait() + groupsWg.Wait() - logger.Infof("[movies] export complete in %s. %d workers used.", time.Since(startTime), workers) + logger.Infof("[groups] export complete in %s. 
%d workers used.", time.Since(startTime), workers) } -func (t *ExportTask) exportMovie(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models.Movie) { +func (t *ExportTask) exportGroup(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models.Group) { defer wg.Done() r := t.repository - movieReader := r.Movie + groupReader := r.Group studioReader := r.Studio + tagReader := r.Tag for m := range jobChan { - newMovieJSON, err := movie.ToJSON(ctx, movieReader, studioReader, m) + if err := m.LoadURLs(ctx, r.Group); err != nil { + logger.Errorf("[groups] <%s> error getting group urls: %v", m.Name, err) + continue + } + if err := m.LoadSubGroupIDs(ctx, r.Group); err != nil { + logger.Errorf("[groups] <%s> error getting group sub-groups: %v", m.Name, err) + continue + } + + newGroupJSON, err := group.ToJSON(ctx, groupReader, studioReader, m) if err != nil { - logger.Errorf("[movies] <%s> error getting tag JSON: %v", m.Name, err) + logger.Errorf("[groups] <%s> error getting tag JSON: %v", m.Name, err) continue } + tags, err := tagReader.FindByGroupID(ctx, m.ID) + if err != nil { + logger.Errorf("[groups] <%s> error getting image tag names: %v", m.Name, err) + continue + } + + newGroupJSON.Tags = tag.GetNames(tags) + + subGroups := m.SubGroups.List() + if err := func() error { + for _, sg := range subGroups { + subGroup, err := groupReader.Find(ctx, sg.GroupID) + if err != nil { + return fmt.Errorf("error getting sub group: %v", err) + } + + newGroupJSON.SubGroups = append(newGroupJSON.SubGroups, jsonschema.SubGroupDescription{ + // TODO - this won't be unique + Group: subGroup.Name, + Description: sg.Description, + }) + } + return nil + }(); err != nil { + logger.Errorf("[groups] <%s> %v", m.Name, err) + } + if t.includeDependencies { if m.StudioID != nil { t.studios.IDs = sliceutil.AppendUnique(t.studios.IDs, *m.StudioID) } } - fn := newMovieJSON.Filename() + fn := newGroupJSON.Filename() - if err := t.json.saveMovie(fn, newMovieJSON); err != nil { - 
logger.Errorf("[movies] <%s> failed to save json: %v", m.Name, err) + if err := t.json.saveGroup(fn, newGroupJSON); err != nil { + logger.Errorf("[groups] <%s> failed to save json: %v", m.Name, err) } } } diff --git a/internal/manager/task_import.go b/internal/manager/task_import.go index 9b5de7354fb..87185c66183 100644 --- a/internal/manager/task_import.go +++ b/internal/manager/task_import.go @@ -13,12 +13,12 @@ import ( "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/gallery" + "github.com/stashapp/stash/pkg/group" "github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/jsonschema" "github.com/stashapp/stash/pkg/models/paths" - "github.com/stashapp/stash/pkg/movie" "github.com/stashapp/stash/pkg/performer" "github.com/stashapp/stash/pkg/scene" "github.com/stashapp/stash/pkg/studio" @@ -127,7 +127,7 @@ func (t *ImportTask) Start(ctx context.Context) { t.ImportTags(ctx) t.ImportPerformers(ctx) t.ImportStudios(ctx) - t.ImportMovies(ctx) + t.ImportGroups(ctx) t.ImportFiles(ctx) t.ImportGalleries(ctx) @@ -292,8 +292,11 @@ func (t *ImportTask) ImportStudios(ctx context.Context) { } func (t *ImportTask) importStudio(ctx context.Context, studioJSON *jsonschema.Studio, pendingParent map[string][]*jsonschema.Studio) error { + r := t.repository + importer := &studio.Importer{ ReaderWriter: t.repository.Studio, + TagWriter: r.Tag, Input: *studioJSON, MissingRefBehaviour: t.MissingRefBehaviour, } @@ -322,14 +325,15 @@ func (t *ImportTask) importStudio(ctx context.Context, studioJSON *jsonschema.St return nil } -func (t *ImportTask) ImportMovies(ctx context.Context) { - logger.Info("[movies] importing") +func (t *ImportTask) ImportGroups(ctx context.Context) { + logger.Info("[groups] importing") + pendingSubs := make(map[string][]*jsonschema.Group) - path := t.json.json.Movies + path := t.json.json.Groups files, err := 
os.ReadDir(path) if err != nil { if !errors.Is(err, os.ErrNotExist) { - logger.Errorf("[movies] failed to read movies directory: %v", err) + logger.Errorf("[groups] failed to read movies directory: %v", err) } return @@ -339,30 +343,79 @@ func (t *ImportTask) ImportMovies(ctx context.Context) { for i, fi := range files { index := i + 1 - movieJSON, err := jsonschema.LoadMovieFile(filepath.Join(path, fi.Name())) + groupJSON, err := jsonschema.LoadGroupFile(filepath.Join(path, fi.Name())) if err != nil { - logger.Errorf("[movies] failed to read json: %v", err) + logger.Errorf("[groups] failed to read json: %v", err) continue } - logger.Progressf("[movies] %d of %d", index, len(files)) + logger.Progressf("[groups] %d of %d", index, len(files)) if err := r.WithTxn(ctx, func(ctx context.Context) error { - movieImporter := &movie.Importer{ - ReaderWriter: r.Movie, - StudioWriter: r.Studio, - Input: *movieJSON, - MissingRefBehaviour: t.MissingRefBehaviour, + return t.importGroup(ctx, groupJSON, pendingSubs, false) + }); err != nil { + var subError group.SubGroupNotExistError + if errors.As(err, &subError) { + missingSub := subError.MissingSubGroup() + pendingSubs[missingSub] = append(pendingSubs[missingSub], groupJSON) + continue } - return performImport(ctx, movieImporter, t.DuplicateBehaviour) - }); err != nil { - logger.Errorf("[movies] <%s> import failed: %v", fi.Name(), err) + logger.Errorf("[groups] <%s> failed to import: %v", fi.Name(), err) continue } } - logger.Info("[movies] import complete") + for _, s := range pendingSubs { + for _, orphanGroupJSON := range s { + if err := r.WithTxn(ctx, func(ctx context.Context) error { + return t.importGroup(ctx, orphanGroupJSON, nil, true) + }); err != nil { + logger.Errorf("[groups] <%s> failed to create: %v", orphanGroupJSON.Name, err) + continue + } + } + } + + logger.Info("[groups] import complete") +} + +func (t *ImportTask) importGroup(ctx context.Context, groupJSON *jsonschema.Group, pendingSub 
map[string][]*jsonschema.Group, fail bool) error { + r := t.repository + + importer := &group.Importer{ + ReaderWriter: r.Group, + StudioWriter: r.Studio, + TagWriter: r.Tag, + Input: *groupJSON, + MissingRefBehaviour: t.MissingRefBehaviour, + } + + // first phase: return error if parent does not exist + if !fail { + importer.MissingRefBehaviour = models.ImportMissingRefEnumFail + } + + if err := performImport(ctx, importer, t.DuplicateBehaviour); err != nil { + return err + } + + for _, containingGroupJSON := range pendingSub[groupJSON.Name] { + if err := t.importGroup(ctx, containingGroupJSON, pendingSub, fail); err != nil { + var subError group.SubGroupNotExistError + if errors.As(err, &subError) { + missingSub := subError.MissingSubGroup() + pendingSub[missingSub] = append(pendingSub[missingSub], containingGroupJSON) + continue + } + + return fmt.Errorf("failed to create containing group <%s>: %v", containingGroupJSON.Name, err) + } + } + + delete(pendingSub, groupJSON.Name) + + return nil } func (t *ImportTask) ImportFiles(ctx context.Context) { @@ -644,7 +697,7 @@ func (t *ImportTask) ImportScenes(ctx context.Context) { MissingRefBehaviour: t.MissingRefBehaviour, GalleryFinder: r.Gallery, - MovieWriter: r.Movie, + GroupWriter: r.Group, PerformerWriter: r.Performer, StudioWriter: r.Studio, TagWriter: r.Tag, diff --git a/internal/manager/task_stash_box_tag.go b/internal/manager/task_stash_box_tag.go index 298b58e279f..8bb39960140 100644 --- a/internal/manager/task_stash_box_tag.go +++ b/internal/manager/task_stash_box_tag.go @@ -311,13 +311,13 @@ func (t *StashBoxBatchTagTask) processMatchedStudio(ctx context.Context, s *mode return err } - partial := s.ToPartial(s.StoredID, t.box.Endpoint, excluded, existingStashIDs) + partial := s.ToPartial(*s.StoredID, t.box.Endpoint, excluded, existingStashIDs) - if err := studio.ValidateModify(ctx, *partial, qb); err != nil { + if err := studio.ValidateModify(ctx, partial, qb); err != nil { return err } - if _, err := 
qb.UpdatePartial(ctx, *partial); err != nil { + if _, err := qb.UpdatePartial(ctx, partial); err != nil { return err } @@ -435,13 +435,13 @@ func (t *StashBoxBatchTagTask) processParentStudio(ctx context.Context, parent * return err } - partial := parent.ToPartial(parent.StoredID, t.box.Endpoint, excluded, existingStashIDs) + partial := parent.ToPartial(*parent.StoredID, t.box.Endpoint, excluded, existingStashIDs) - if err := studio.ValidateModify(ctx, *partial, qb); err != nil { + if err := studio.ValidateModify(ctx, partial, qb); err != nil { return err } - if _, err := qb.UpdatePartial(ctx, *partial); err != nil { + if _, err := qb.UpdatePartial(ctx, partial); err != nil { return err } diff --git a/internal/static/embed.go b/internal/static/embed.go index d82c0b66bd7..91437a81fbf 100644 --- a/internal/static/embed.go +++ b/internal/static/embed.go @@ -1,3 +1,4 @@ +// Package static provides the static files embedded in the application. package static import ( @@ -7,7 +8,7 @@ import ( "io/fs" ) -//go:embed performer performer_male scene image tag studio movie +//go:embed performer performer_male scene image gallery tag studio group var data embed.FS const ( @@ -20,14 +21,17 @@ const ( Image = "image" DefaultImageImage = "image/image.svg" + Gallery = "gallery" + DefaultGalleryImage = "gallery/gallery.svg" + Tag = "tag" DefaultTagImage = "tag/tag.svg" Studio = "studio" DefaultStudioImage = "studio/studio.svg" - Movie = "movie" - DefaultMovieImage = "movie/movie.png" + Group = "group" + DefaultGroupImage = "group/group.svg" ) // Sub returns an FS rooted at path, using fs.Sub. 
diff --git a/internal/static/gallery/gallery.svg b/internal/static/gallery/gallery.svg new file mode 100644 index 00000000000..5fb2edc52d2 --- /dev/null +++ b/internal/static/gallery/gallery.svg @@ -0,0 +1,6 @@ + + + \ No newline at end of file diff --git a/internal/static/group/group.svg b/internal/static/group/group.svg new file mode 100644 index 00000000000..2b9f0dcbdcc --- /dev/null +++ b/internal/static/group/group.svg @@ -0,0 +1,6 @@ + + + \ No newline at end of file diff --git a/internal/static/movie/movie.png b/internal/static/movie/movie.png deleted file mode 100644 index 0bb8b00a6cf..00000000000 Binary files a/internal/static/movie/movie.png and /dev/null differ diff --git a/pkg/ffmpeg/browser.go b/pkg/ffmpeg/browser.go index 5e34a5f140d..d8bcc0b4ff1 100644 --- a/pkg/ffmpeg/browser.go +++ b/pkg/ffmpeg/browser.go @@ -20,7 +20,7 @@ var validForHevc = []Container{Mp4} var validAudioForMkv = []ProbeAudioCodec{Aac, Mp3, Vorbis, Opus} var validAudioForWebm = []ProbeAudioCodec{Vorbis, Opus} -var validAudioForMp4 = []ProbeAudioCodec{Aac, Mp3} +var validAudioForMp4 = []ProbeAudioCodec{Aac, Mp3, Opus} var ( // ErrUnsupportedVideoCodecForBrowser is returned when the video codec is not supported for browser streaming. 
diff --git a/pkg/ffmpeg/codec.go b/pkg/ffmpeg/codec.go index 1195fdc3dd3..45fff9ffbe3 100644 --- a/pkg/ffmpeg/codec.go +++ b/pkg/ffmpeg/codec.go @@ -1,25 +1,32 @@ package ffmpeg -type VideoCodec string +type VideoCodec struct { + Name string // The full name of the codec including profile/quality + CodeName string // The core codec name without profile/quality suffix +} + +func makeVideoCodec(name string, codename string) VideoCodec { + return VideoCodec{name, codename} +} func (c VideoCodec) Args() []string { - if c == "" { + if c.CodeName == "" { return nil } - return []string{"-c:v", string(c)} + return []string{"-c:v", string(c.CodeName)} } var ( // Software codec's - VideoCodecLibX264 VideoCodec = "libx264" - VideoCodecLibWebP VideoCodec = "libwebp" - VideoCodecBMP VideoCodec = "bmp" - VideoCodecMJpeg VideoCodec = "mjpeg" - VideoCodecVP9 VideoCodec = "libvpx-vp9" - VideoCodecVPX VideoCodec = "libvpx" - VideoCodecLibX265 VideoCodec = "libx265" - VideoCodecCopy VideoCodec = "copy" + VideoCodecLibX264 = makeVideoCodec("x264", "libx264") + VideoCodecLibWebP = makeVideoCodec("WebP", "libwebp") + VideoCodecBMP = makeVideoCodec("BMP", "bmp") + VideoCodecMJpeg = makeVideoCodec("Jpeg", "mjpeg") + VideoCodecVP9 = makeVideoCodec("VPX-VP9", "libvpx-vp9") + VideoCodecVPX = makeVideoCodec("VPX-VP8", "libvpx") + VideoCodecLibX265 = makeVideoCodec("x265", "libx265") + VideoCodecCopy = makeVideoCodec("Copy", "copy") ) type AudioCodec string diff --git a/pkg/ffmpeg/codec_hardware.go b/pkg/ffmpeg/codec_hardware.go index 4c39cb3b91a..5151e7efe1b 100644 --- a/pkg/ffmpeg/codec_hardware.go +++ b/pkg/ffmpeg/codec_hardware.go @@ -4,7 +4,9 @@ import ( "bytes" "context" "fmt" + "math" "regexp" + "strconv" "strings" "github.com/stashapp/stash/pkg/logger" @@ -13,42 +15,49 @@ import ( var ( // Hardware codec's - VideoCodecN264 VideoCodec = "h264_nvenc" - VideoCodecI264 VideoCodec = "h264_qsv" - VideoCodecA264 VideoCodec = "h264_amf" - VideoCodecM264 VideoCodec = "h264_videotoolbox" - 
VideoCodecV264 VideoCodec = "h264_vaapi" - VideoCodecR264 VideoCodec = "h264_v4l2m2m" - VideoCodecO264 VideoCodec = "h264_omx" - VideoCodecIVP9 VideoCodec = "vp9_qsv" - VideoCodecVVP9 VideoCodec = "vp9_vaapi" - VideoCodecVVPX VideoCodec = "vp8_vaapi" + VideoCodecN264 = makeVideoCodec("H264 NVENC", "h264_nvenc") + VideoCodecN264H = makeVideoCodec("H264 NVENC HQ profile", "h264_nvenc") + VideoCodecI264 = makeVideoCodec("H264 Intel Quick Sync Video (QSV)", "h264_qsv") + VideoCodecI264C = makeVideoCodec("H264 Intel Quick Sync Video (QSV) Compatibility profile", "h264_qsv") + VideoCodecA264 = makeVideoCodec("H264 Advanced Media Framework (AMF)", "h264_amf") + VideoCodecM264 = makeVideoCodec("H264 VideoToolbox", "h264_videotoolbox") + VideoCodecV264 = makeVideoCodec("H264 VAAPI", "h264_vaapi") + VideoCodecR264 = makeVideoCodec("H264 V4L2M2M", "h264_v4l2m2m") + VideoCodecO264 = makeVideoCodec("H264 OMX", "h264_omx") + VideoCodecIVP9 = makeVideoCodec("VP9 Intel Quick Sync Video (QSV)", "vp9_qsv") + VideoCodecVVP9 = makeVideoCodec("VP9 VAAPI", "vp9_vaapi") + VideoCodecVVPX = makeVideoCodec("VP8 VAAPI", "vp8_vaapi") ) -const minHeight int = 256 +const minHeight int = 480 // Tests all (given) hardware codec's func (f *FFMpeg) InitHWSupport(ctx context.Context) { var hwCodecSupport []VideoCodec + // Note that the first compatible codec is returned, so order is important for _, codec := range []VideoCodec{ + VideoCodecN264H, VideoCodecN264, VideoCodecI264, + VideoCodecI264C, VideoCodecV264, VideoCodecR264, VideoCodecIVP9, VideoCodecVVP9, + VideoCodecM264, } { var args Args args = append(args, "-hide_banner") args = args.LogLevel(LogLevelWarning) args = f.hwDeviceInit(args, codec, false) args = args.Format("lavfi") - args = args.Input(fmt.Sprintf("color=c=red:s=%dx%d", 1280, 720)) + vFile := &models.VideoFile{Width: 1280, Height: 720} + args = args.Input(fmt.Sprintf("color=c=red:s=%dx%d", vFile.Width, vFile.Height)) args = args.Duration(0.1) // Test scaling - videoFilter := 
f.hwMaxResFilter(codec, 1280, 720, minHeight, false) + videoFilter := f.hwMaxResFilter(codec, vFile, minHeight, false) args = append(args, CodecInit(codec)...) args = args.VideoFilter(videoFilter) @@ -75,7 +84,7 @@ func (f *FFMpeg) InitHWSupport(ctx context.Context) { outstr := fmt.Sprintf("[InitHWSupport] Supported HW codecs [%d]:\n", len(hwCodecSupport)) for _, codec := range hwCodecSupport { - outstr += fmt.Sprintf("\t%s\n", codec) + outstr += fmt.Sprintf("\t%s - %s\n", codec.Name, codec.CodeName) } logger.Info(outstr) @@ -93,9 +102,9 @@ func (f *FFMpeg) hwCanFullHWTranscode(ctx context.Context, codec VideoCodec, vf args = args.XError() args = f.hwDeviceInit(args, codec, true) args = args.Input(vf.Path) - args = args.Duration(0.1) + args = args.Duration(1) - videoFilter := f.hwMaxResFilter(codec, vf.Width, vf.Height, reqHeight, true) + videoFilter := f.hwMaxResFilter(codec, vf, reqHeight, true) args = append(args, CodecInit(codec)...) args = args.VideoFilter(videoFilter) @@ -124,16 +133,17 @@ func (f *FFMpeg) hwCanFullHWTranscode(ctx context.Context, codec VideoCodec, vf // Prepend input for hardware encoding only func (f *FFMpeg) hwDeviceInit(args Args, toCodec VideoCodec, fullhw bool) Args { switch toCodec { - case VideoCodecN264: + case VideoCodecN264, + VideoCodecN264H: args = append(args, "-hwaccel_device") args = append(args, "0") if fullhw { + args = append(args, "-threads") + args = append(args, "1") args = append(args, "-hwaccel") args = append(args, "cuda") args = append(args, "-hwaccel_output_format") args = append(args, "cuda") - args = append(args, "-extra_hw_frames") - args = append(args, "5") } case VideoCodecV264, VideoCodecVVP9: @@ -146,6 +156,7 @@ func (f *FFMpeg) hwDeviceInit(args Args, toCodec VideoCodec, fullhw bool) Args { args = append(args, "vaapi") } case VideoCodecI264, + VideoCodecI264C, VideoCodecIVP9: if fullhw { args = append(args, "-hwaccel") @@ -158,6 +169,16 @@ func (f *FFMpeg) hwDeviceInit(args Args, toCodec VideoCodec, fullhw 
bool) Args { args = append(args, "-filter_hw_device") args = append(args, "hw") } + case VideoCodecM264: + if fullhw { + args = append(args, "-hwaccel") + args = append(args, "videotoolbox") + args = append(args, "-hwaccel_output_format") + args = append(args, "videotoolbox_vld") + } else { + args = append(args, "-init_hw_device") + args = append(args, "videotoolbox=vt") + } } return args @@ -173,91 +194,160 @@ func (f *FFMpeg) hwFilterInit(toCodec VideoCodec, fullhw bool) VideoFilter { videoFilter = videoFilter.Append("format=nv12") videoFilter = videoFilter.Append("hwupload") } - case VideoCodecN264: + case VideoCodecN264, VideoCodecN264H: if !fullhw { videoFilter = videoFilter.Append("format=nv12") videoFilter = videoFilter.Append("hwupload_cuda") } case VideoCodecI264, + VideoCodecI264C, VideoCodecIVP9: if !fullhw { videoFilter = videoFilter.Append("hwupload=extra_hw_frames=64") videoFilter = videoFilter.Append("format=qsv") } + case VideoCodecM264: + if !fullhw { + videoFilter = videoFilter.Append("format=nv12") + videoFilter = videoFilter.Append("hwupload") + } } return videoFilter } -var scaler_re = regexp.MustCompile(`scale=(?P[-\d]+:[-\d]+)`) +var scaler_re = regexp.MustCompile(`scale=(?P([-\d]+):([-\d]+))`) -func templateReplaceScale(input string, template string, match []int, minusonehack bool) string { +func templateReplaceScale(input string, template string, match []int, vf *models.VideoFile, minusonehack bool) string { result := []byte{} - res := string(scaler_re.ExpandString(result, template, input, match)) - - // BUG: [scale_qsv]: Size values less than -1 are not acceptable. 
- // Fix: Replace all instances of -2 with -1 in a scale operation if minusonehack { - res = strings.ReplaceAll(res, "-2", "-1") + // Parse width and height + w, err := strconv.Atoi(input[match[4]:match[5]]) + if err != nil { + logger.Error("failed to parse width") + return input + } + h, err := strconv.Atoi(input[match[6]:match[7]]) + if err != nil { + logger.Error("failed to parse height") + return input + } + + // Calculate ratio + ratio := float64(vf.Width) / float64(vf.Height) + if w < 0 { + w = int(math.Round(float64(h) * ratio)) + } else if h < 0 { + h = int(math.Round(float64(w) / ratio)) + } + + // Fix not divisible by 2 errors + if w%2 != 0 { + w++ + } + if h%2 != 0 { + h++ + } + + template = strings.ReplaceAll(template, "$value", fmt.Sprintf("%d:%d", w, h)) } + res := string(scaler_re.ExpandString(result, template, input, match)) + matchStart := match[0] matchEnd := match[1] return input[0:matchStart] + res + input[matchEnd:] } -// Replace video filter scaling with hardware scaling for full hardware transcoding -func (f *FFMpeg) hwCodecFilter(args VideoFilter, codec VideoCodec, fullhw bool) VideoFilter { +// Replace video filter scaling with hardware scaling for full hardware transcoding (also fixes the format) +func (f *FFMpeg) hwCodecFilter(args VideoFilter, codec VideoCodec, vf *models.VideoFile, fullhw bool) VideoFilter { sargs := string(args) match := scaler_re.FindStringSubmatchIndex(sargs) if match == nil { - return args + return f.hwApplyFullHWFilter(args, codec, fullhw) } + return f.hwApplyScaleTemplate(sargs, codec, match, vf, fullhw) +} + +// Apply format switching if applicable +func (f *FFMpeg) hwApplyFullHWFilter(args VideoFilter, codec VideoCodec, fullhw bool) VideoFilter { switch codec { - case VideoCodecN264: - template := "scale_cuda=$value" - // In 10bit inputs you might get an error like "10 bit encode not supported" - if fullhw && f.version.major >= 5 { - template += ":format=nv12" + case VideoCodecN264, VideoCodecN264H: + if fullhw 
&& f.version.Gteq(Version{major: 5}) { // Added in FFMpeg 5 + args = args.Append("scale_cuda=format=yuv420p") + } + case VideoCodecV264, VideoCodecVVP9: + if fullhw && f.version.Gteq(Version{major: 3, minor: 1}) { // Added in FFMpeg 3.1 + args = args.Append("scale_vaapi=format=nv12") + } + case VideoCodecI264, VideoCodecI264C, VideoCodecIVP9: + if fullhw && f.version.Gteq(Version{major: 3, minor: 3}) { // Added in FFMpeg 3.3 + args = args.Append("scale_qsv=format=nv12") } - args = VideoFilter(templateReplaceScale(sargs, template, match, false)) - case VideoCodecV264, - VideoCodecVVP9: - template := "scale_vaapi=$value" - args = VideoFilter(templateReplaceScale(sargs, template, match, false)) - case VideoCodecI264, - VideoCodecIVP9: - template := "scale_qsv=$value" - args = VideoFilter(templateReplaceScale(sargs, template, match, true)) } return args } +// Switch scaler +func (f *FFMpeg) hwApplyScaleTemplate(sargs string, codec VideoCodec, match []int, vf *models.VideoFile, fullhw bool) VideoFilter { + var template string + + switch codec { + case VideoCodecN264, VideoCodecN264H: + template = "scale_cuda=$value" + if fullhw && f.version.Gteq(Version{major: 5}) { // Added in FFMpeg 5 + template += ":format=yuv420p" + } + case VideoCodecV264, VideoCodecVVP9: + template = "scale_vaapi=$value" + if fullhw && f.version.Gteq(Version{major: 3, minor: 1}) { // Added in FFMpeg 3.1 + template += ":format=nv12" + } + case VideoCodecI264, VideoCodecI264C, VideoCodecIVP9: + template = "scale_qsv=$value" + if fullhw && f.version.Gteq(Version{major: 3, minor: 3}) { // Added in FFMpeg 3.3 + template += ":format=nv12" + } + case VideoCodecM264: + template = "scale_vt=$value" + default: + return VideoFilter(sargs) + } + + // BUG: [scale_qsv]: Size values less than -1 are not acceptable. 
+ isIntel := codec == VideoCodecI264 || codec == VideoCodecI264C || codec == VideoCodecIVP9 + // BUG: scale_vt doesn't call ff_scale_adjust_dimensions, thus cant accept negative size values + isApple := codec == VideoCodecM264 + return VideoFilter(templateReplaceScale(sargs, template, match, vf, isIntel || isApple)) +} + // Returns the max resolution for a given codec, or a default -func (f *FFMpeg) hwCodecMaxRes(codec VideoCodec, dW int, dH int) (int, int) { +func (f *FFMpeg) hwCodecMaxRes(codec VideoCodec) (int, int) { switch codec { case VideoCodecN264, - VideoCodecI264: + VideoCodecN264H, + VideoCodecI264, + VideoCodecI264C: return 4096, 4096 } - return dW, dH + return 0, 0 } // Return a maxres filter -func (f *FFMpeg) hwMaxResFilter(toCodec VideoCodec, width int, height int, reqHeight int, fullhw bool) VideoFilter { - if width == 0 || height == 0 { +func (f *FFMpeg) hwMaxResFilter(toCodec VideoCodec, vf *models.VideoFile, reqHeight int, fullhw bool) VideoFilter { + if vf.Width == 0 || vf.Height == 0 { return "" } videoFilter := f.hwFilterInit(toCodec, fullhw) - maxWidth, maxHeight := f.hwCodecMaxRes(toCodec, width, height) - videoFilter = videoFilter.ScaleMaxLM(width, height, reqHeight, maxWidth, maxHeight) - return f.hwCodecFilter(videoFilter, toCodec, fullhw) + maxWidth, maxHeight := f.hwCodecMaxRes(toCodec) + videoFilter = videoFilter.ScaleMaxLM(vf.Width, vf.Height, reqHeight, maxWidth, maxHeight) + return f.hwCodecFilter(videoFilter, toCodec, vf, fullhw) } // Return if a hardware accelerated for HLS is available @@ -265,9 +355,12 @@ func (f *FFMpeg) hwCodecHLSCompatible() *VideoCodec { for _, element := range f.hwCodecSupport { switch element { case VideoCodecN264, + VideoCodecN264H, VideoCodecI264, + VideoCodecI264C, VideoCodecV264, - VideoCodecR264: + VideoCodecR264, + VideoCodecM264: // Note that the Apple encoder sucks at startup, thus HLS quality is crap return &element } } @@ -279,7 +372,10 @@ func (f *FFMpeg) hwCodecMP4Compatible() *VideoCodec { for 
_, element := range f.hwCodecSupport { switch element { case VideoCodecN264, - VideoCodecI264: + VideoCodecN264H, + VideoCodecI264, + VideoCodecI264C, + VideoCodecM264: return &element } } diff --git a/pkg/ffmpeg/downloader.go b/pkg/ffmpeg/downloader.go index 407f38a3f0a..2498c84418e 100644 --- a/pkg/ffmpeg/downloader.go +++ b/pkg/ffmpeg/downloader.go @@ -12,11 +12,11 @@ func GetFFmpegURL() []string { case "linux": switch runtime.GOARCH { case "amd64": - urls = []string{"https://github.com/ffbinaries/ffbinaries-prebuilt/releases/download/v4.2.1/ffmpeg-4.2.1-linux-64.zip", "https://github.com/ffbinaries/ffbinaries-prebuilt/releases/download/v4.2.1/ffprobe-4.2.1-linux-64.zip"} + urls = []string{"https://github.com/ffbinaries/ffbinaries-prebuilt/releases/download/v6.1/ffmpeg-6.1-linux-64.zip", "https://github.com/ffbinaries/ffbinaries-prebuilt/releases/download/v6.1/ffprobe-6.1-linux-64.zip"} case "arm": - urls = []string{"https://github.com/ffbinaries/ffbinaries-prebuilt/releases/download/v4.2.1/ffmpeg-4.2.1-linux-armhf-32.zip", "https://github.com/ffbinaries/ffbinaries-prebuilt/releases/download/v4.2.1/ffprobe-4.2.1-linux-armhf-32.zip"} + urls = []string{"https://github.com/ffbinaries/ffbinaries-prebuilt/releases/download/v6.1/ffmpeg-6.1-linux-armhf-32.zip", "https://github.com/ffbinaries/ffbinaries-prebuilt/releases/download/v6.1/ffprobe-6.1-linux-armhf-32.zip"} case "arm64": - urls = []string{"https://github.com/ffbinaries/ffbinaries-prebuilt/releases/download/v4.2.1/ffmpeg-4.2.1-linux-arm-64.zip", "https://github.com/ffbinaries/ffbinaries-prebuilt/releases/download/v4.2.1/ffprobe-4.2.1-linux-arm-64.zip"} + urls = []string{"https://github.com/ffbinaries/ffbinaries-prebuilt/releases/download/v6.1/ffmpeg-6.1-linux-arm-64.zip", "https://github.com/ffbinaries/ffbinaries-prebuilt/releases/download/v6.1/ffprobe-6.1-linux-arm-64.zip"} } case "windows": urls = []string{"https://www.gyan.dev/ffmpeg/builds/ffmpeg-release-essentials.zip"} diff --git a/pkg/ffmpeg/ffmpeg.go 
b/pkg/ffmpeg/ffmpeg.go index e929cc7f8dc..ce1232e5d26 100644 --- a/pkg/ffmpeg/ffmpeg.go +++ b/pkg/ffmpeg/ffmpeg.go @@ -145,6 +145,8 @@ func ResolveFFMpeg(path string, fallbackPath string) string { return ret } +var version_re = regexp.MustCompile(`ffmpeg version n?((\d+)\.(\d+)(?:\.(\d+))?)`) + func (f *FFMpeg) getVersion() error { var args Args args = append(args, "-version") @@ -158,7 +160,6 @@ func (f *FFMpeg) getVersion() error { return err } - version_re := regexp.MustCompile(`ffmpeg version ((\d+)\.(\d+)(?:\.(\d+))?)`) stdoutStr := stdout.String() match := version_re.FindStringSubmatchIndex(stdoutStr) if match == nil { @@ -183,22 +184,40 @@ func (f *FFMpeg) getVersion() error { if i, err := strconv.Atoi(patchS); err == nil { f.version.patch = i } - logger.Debugf("FFMpeg version %d.%d.%d detected", f.version.major, f.version.minor, f.version.patch) + logger.Debugf("FFMpeg version %s detected", f.version.String()) return nil } // FFMpeg version params -type FFMpegVersion struct { +type Version struct { major int minor int patch int } +// Gteq returns true if the version is greater than or equal to the other version. +func (v Version) Gteq(other Version) bool { + if v.major > other.major { + return true + } + if v.major == other.major && v.minor > other.minor { + return true + } + if v.major == other.major && v.minor == other.minor && v.patch >= other.patch { + return true + } + return false +} + +func (v Version) String() string { + return fmt.Sprintf("%d.%d.%d", v.major, v.minor, v.patch) +} + // FFMpeg provides an interface to ffmpeg. type FFMpeg struct { ffmpeg string - version FFMpegVersion + version Version hwCodecSupport []VideoCodec } diff --git a/pkg/ffmpeg/ffmpeg_test.go b/pkg/ffmpeg/ffmpeg_test.go new file mode 100644 index 00000000000..a56c7e61ad0 --- /dev/null +++ b/pkg/ffmpeg/ffmpeg_test.go @@ -0,0 +1,75 @@ +// Package ffmpeg provides a wrapper around the ffmpeg and ffprobe executables. 
+package ffmpeg + +import "testing" + +func TestFFMpegVersion_GreaterThan(t *testing.T) { + tests := []struct { + name string + this Version + other Version + want bool + }{ + { + "major greater, minor equal, patch equal", + Version{2, 0, 0}, + Version{1, 0, 0}, + true, + }, + { + "major greater, minor less, patch less", + Version{2, 1, 1}, + Version{1, 0, 0}, + true, + }, + { + "major equal, minor greater, patch equal", + Version{1, 1, 0}, + Version{1, 0, 0}, + true, + }, + { + "major equal, minor equal, patch greater", + Version{1, 0, 1}, + Version{1, 0, 0}, + true, + }, + { + "major equal, minor equal, patch equal", + Version{1, 0, 0}, + Version{1, 0, 0}, + true, + }, + { + "major less, minor equal, patch equal", + Version{1, 0, 0}, + Version{2, 0, 0}, + false, + }, + { + "major equal, minor less, patch equal", + Version{1, 0, 0}, + Version{1, 1, 0}, + false, + }, + { + "major equal, minor equal, patch less", + Version{1, 0, 0}, + Version{1, 0, 1}, + false, + }, + { + "major less, minor less, patch less", + Version{1, 0, 0}, + Version{2, 1, 1}, + false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := tt.this.Gteq(tt.other); got != tt.want { + t.Errorf("FFMpegVersion.GreaterThan() = %v, want %v", got, tt.want) + } + }) + } +} diff --git a/pkg/ffmpeg/ffprobe.go b/pkg/ffmpeg/ffprobe.go index 31b3cbf0029..59f8ed218b5 100644 --- a/pkg/ffmpeg/ffprobe.go +++ b/pkg/ffmpeg/ffprobe.go @@ -1,12 +1,14 @@ package ffmpeg import ( + "bytes" "encoding/json" "errors" "fmt" "math" "os" "os/exec" + "regexp" "strconv" "strings" "time" @@ -16,6 +18,8 @@ import ( "github.com/stashapp/stash/pkg/logger" ) +const minimumFFProbeVersion = 5 + func ValidateFFProbe(ffprobePath string) error { cmd := stashExec.Command(ffprobePath, "-h") bytes, err := cmd.CombinedOutput() @@ -139,16 +143,94 @@ func (v *VideoFile) TranscodeScale(maxSize int) (int, int) { } // FFProbe provides an interface to the ffprobe executable. 
-type FFProbe string +type FFProbe struct { + path string + version Version +} func (f *FFProbe) Path() string { - return string(*f) + return f.path +} + +var ffprobeVersionRE = regexp.MustCompile(`ffprobe version n?((\d+)\.(\d+)(?:\.(\d+))?)`) + +func (f *FFProbe) getVersion() error { + var args []string + args = append(args, "-version") + cmd := stashExec.Command(f.path, args...) + + var stdout bytes.Buffer + cmd.Stdout = &stdout + + var err error + if err = cmd.Run(); err != nil { + return err + } + + stdoutStr := stdout.String() + match := ffprobeVersionRE.FindStringSubmatchIndex(stdoutStr) + if match == nil { + return errors.New("version string malformed") + } + + majorS := stdoutStr[match[4]:match[5]] + minorS := stdoutStr[match[6]:match[7]] + + // patch is optional + var patchS string + if match[8] != -1 && match[9] != -1 { + patchS = stdoutStr[match[8]:match[9]] + } + + if i, err := strconv.Atoi(majorS); err == nil { + f.version.major = i + } + if i, err := strconv.Atoi(minorS); err == nil { + f.version.minor = i + } + if i, err := strconv.Atoi(patchS); err == nil { + f.version.patch = i + } + logger.Debugf("FFProbe version %s detected", f.version.String()) + + return nil +} + +// Creates a new FFProbe instance. +func NewFFProbe(path string) *FFProbe { + ret := &FFProbe{ + path: path, + } + if err := ret.getVersion(); err != nil { + logger.Warnf("FFProbe version not detected %v", err) + } + + if ret.version.major != 0 && ret.version.major < minimumFFProbeVersion { + logger.Warnf("FFProbe version %d.%d.%d detected, but %d.x or later is required", ret.version.major, ret.version.minor, ret.version.patch, minimumFFProbeVersion) + } + + return ret } // NewVideoFile runs ffprobe on the given path and returns a VideoFile. func (f *FFProbe) NewVideoFile(videoPath string) (*VideoFile, error) { - args := []string{"-v", "quiet", "-print_format", "json", "-show_format", "-show_streams", "-show_error", videoPath} - cmd := stashExec.Command(string(*f), args...) 
+ args := []string{ + "-v", + "quiet", + "-print_format", "json", + "-show_format", + "-show_streams", + "-show_error", + } + + // show_entries stream_side_data=rotation requires 5.x or later ffprobe + if f.version.major >= 5 { + args = append(args, "-show_entries", "stream_side_data=rotation") + } + + args = append(args, videoPath) + + cmd := stashExec.Command(f.path, args...) out, err := cmd.Output() if err != nil { @@ -167,7 +249,7 @@ func (f *FFProbe) NewVideoFile(videoPath string) (*VideoFile, error) { // Used when the frame count is missing or incorrect. func (f *FFProbe) GetReadFrameCount(path string) (int64, error) { args := []string{"-v", "quiet", "-print_format", "json", "-count_frames", "-show_format", "-show_streams", "-show_error", path} - out, err := stashExec.Command(string(*f), args...).Output() + out, err := stashExec.Command(f.path, args...).Output() if err != nil { return 0, fmt.Errorf("FFProbe encountered an error with <%s>.\nError JSON:\n%s\nError: %s", path, string(out), err.Error()) @@ -246,13 +328,14 @@ func parse(filePath string, probeJSON *FFProbeJSON) (*VideoFile, error) { framerate = 0 } result.FrameRate = math.Round(framerate*100) / 100 - if rotate, err := strconv.ParseInt(videoStream.Tags.Rotate, 10, 64); err == nil && rotate != 180 { + result.Width = videoStream.Width + result.Height = videoStream.Height + + if isRotated(videoStream) { result.Width = videoStream.Height result.Height = videoStream.Width - } else { - result.Width = videoStream.Width - result.Height = videoStream.Height } + result.VideoStreamDuration, err = strconv.ParseFloat(videoStream.Duration, 64) if err != nil { // Revert to the historical behaviour, which is still correct in the vast majority of cases. 
@@ -263,6 +346,25 @@ func parse(filePath string, probeJSON *FFProbeJSON) (*VideoFile, error) { return result, nil } +func isRotated(s *FFProbeStream) bool { + rotate, _ := strconv.ParseInt(s.Tags.Rotate, 10, 64) + if rotate != 180 && rotate != 0 { + return true + } + + for _, sd := range s.SideDataList { + r := sd.Rotation + if r < 0 { + r = -r + } + if r != 0 && r != 180 { + return true + } + } + + return false +} + func (v *VideoFile) getAudioStream() *FFProbeStream { index := v.getStreamIndex("audio", v.JSON) if index != -1 { diff --git a/pkg/ffmpeg/filter.go b/pkg/ffmpeg/filter.go index 52be57c9c06..dd6ecc10699 100644 --- a/pkg/ffmpeg/filter.go +++ b/pkg/ffmpeg/filter.go @@ -59,33 +59,28 @@ func (f VideoFilter) ScaleMax(inputWidth, inputHeight, maxSize int) VideoFilter return f.ScaleDimensions(maxSize, -2) } -// ScaleMaxLM returns a VideoFilter scaling to maxSize with respect to a max size. +// ScaleMaxLM scales an image to fit within specified maximum dimensions while maintaining its aspect ratio. 
func (f VideoFilter) ScaleMaxLM(width int, height int, reqHeight int, maxWidth int, maxHeight int) VideoFilter { - // calculate the aspect ratio of the current resolution - aspectRatio := width / height + if maxWidth == 0 || maxHeight == 0 { + return f.ScaleMax(width, height, reqHeight) + } - // find the max height + aspectRatio := float64(width) / float64(height) desiredHeight := reqHeight if desiredHeight == 0 { desiredHeight = height } + desiredWidth := int(float64(desiredHeight) * aspectRatio) - // calculate the desired width based on the desired height and the aspect ratio - desiredWidth := int(desiredHeight * aspectRatio) - - // check which dimension to scale based on the maximum resolution - if desiredHeight > maxHeight || desiredWidth > maxWidth { - if desiredHeight-maxHeight > desiredWidth-maxWidth { - // scale the height down to the maximum height - return f.ScaleDimensions(-2, maxHeight) - } else { - // scale the width down to the maximum width - return f.ScaleDimensions(maxWidth, -2) - } + if desiredHeight <= maxHeight && desiredWidth <= maxWidth { + return f.ScaleMax(width, height, reqHeight) } - // the current resolution can be scaled to the desired height without exceeding the maximum resolution - return f.ScaleMax(width, height, reqHeight) + if float64(desiredHeight-maxHeight) > float64(desiredWidth-maxWidth) { + return f.ScaleDimensions(-2, maxHeight) + } else { + return f.ScaleDimensions(maxWidth, -2) + } } // Fps returns a VideoFilter setting the frames per second. 
diff --git a/pkg/ffmpeg/stream.go b/pkg/ffmpeg/stream.go index b94c03b55a3..cd043dadcee 100644 --- a/pkg/ffmpeg/stream.go +++ b/pkg/ffmpeg/stream.go @@ -23,7 +23,7 @@ const ( type StreamManager struct { cacheDir string encoder *FFMpeg - ffprobe FFProbe + ffprobe *FFProbe config StreamManagerConfig lockManager *fsutil.ReadLockManager @@ -42,7 +42,7 @@ type StreamManagerConfig interface { GetTranscodeHardwareAcceleration() bool } -func NewStreamManager(cacheDir string, encoder *FFMpeg, ffprobe FFProbe, config StreamManagerConfig, lockManager *fsutil.ReadLockManager) *StreamManager { +func NewStreamManager(cacheDir string, encoder *FFMpeg, ffprobe *FFProbe, config StreamManagerConfig, lockManager *fsutil.ReadLockManager) *StreamManager { if cacheDir == "" { logger.Warn("cache directory is not set. Live HLS/DASH transcoding will be disabled") } diff --git a/pkg/ffmpeg/stream_segmented.go b/pkg/ffmpeg/stream_segmented.go index 1058fb8eb27..7d529b82c41 100644 --- a/pkg/ffmpeg/stream_segmented.go +++ b/pkg/ffmpeg/stream_segmented.go @@ -8,6 +8,7 @@ import ( "io" "math" "net/http" + "net/url" "os" "os/exec" "path/filepath" @@ -45,6 +46,10 @@ const ( // maximum idle time between segment requests before // stopping transcode and deleting cache folder maxIdleTime = 30 * time.Second + + resolutionParamKey = "resolution" + // TODO - setting the apikey in here isn't ideal + apiKeyParamKey = "apikey" ) type StreamType struct { @@ -342,7 +347,7 @@ func (s *runningStream) makeStreamArgs(sm *StreamManager, segment int) Args { videoOnly := ProbeAudioCodec(s.vf.AudioCodec) == MissingUnsupported - videoFilter := sm.encoder.hwMaxResFilter(codec, s.vf.Width, s.vf.Height, s.maxTranscodeSize, fullhw) + videoFilter := sm.encoder.hwMaxResFilter(codec, s.vf, s.maxTranscodeSize, fullhw) args = append(args, s.streamType.Args(codec, segment, videoFilter, videoOnly, s.outputDir)...) 
@@ -425,9 +430,21 @@ func serveHLSManifest(sm *StreamManager, w http.ResponseWriter, r *http.Request, baseUrl.RawQuery = "" baseURL := baseUrl.String() - var urlQuery string + urlQuery := url.Values{} + apikey := r.URL.Query().Get(apiKeyParamKey) + if resolution != "" { - urlQuery = fmt.Sprintf("?resolution=%s", resolution) + urlQuery.Set(resolutionParamKey, resolution) + } + + // TODO - this needs to be handled outside of this package + if apikey != "" { + urlQuery.Set(apiKeyParamKey, apikey) + } + + urlQueryString := "" + if len(urlQuery) > 0 { + urlQueryString = "?" + urlQuery.Encode() } var buf bytes.Buffer @@ -449,7 +466,7 @@ func serveHLSManifest(sm *StreamManager, w http.ResponseWriter, r *http.Request, } fmt.Fprintf(&buf, "#EXTINF:%f,\n", thisLength) - fmt.Fprintf(&buf, "%s/%d.ts%s\n", baseURL, segment, urlQuery) + fmt.Fprintf(&buf, "%s/%d.ts%s\n", baseURL, segment, urlQueryString) leftover -= thisLength segment++ @@ -508,11 +525,18 @@ func serveDASHManifest(sm *StreamManager, w http.ResponseWriter, r *http.Request videoWidth = vf.Width } - var urlQuery string + urlQuery := url.Values{} + + // TODO - this needs to be handled outside of this package + apikey := r.URL.Query().Get(apiKeyParamKey) + if apikey != "" { + urlQuery.Set(apiKeyParamKey, apikey) + } + maxTranscodeSize := sm.config.GetMaxStreamingTranscodeSize().GetMaxResolution() if resolution != "" { maxTranscodeSize = models.StreamingResolutionEnum(resolution).GetMaxResolution() - urlQuery = fmt.Sprintf("?resolution=%s", resolution) + urlQuery.Set(resolutionParamKey, resolution) } if maxTranscodeSize != 0 { videoSize := videoHeight @@ -527,6 +551,11 @@ func serveDASHManifest(sm *StreamManager, w http.ResponseWriter, r *http.Request } } + urlQueryString := "" + if len(urlQuery) > 0 { + urlQueryString = "?" 
+ urlQuery.Encode() + } + mediaDuration := mpd.Duration(time.Duration(probeResult.FileDuration * float64(time.Second))) m := mpd.NewMPD(mpd.DASH_PROFILE_LIVE, mediaDuration.String(), "PT4.0S") @@ -536,12 +565,12 @@ func serveDASHManifest(sm *StreamManager, w http.ResponseWriter, r *http.Request video, _ := m.AddNewAdaptationSetVideo(MimeWebmVideo, "progressive", true, 1) - _, _ = video.SetNewSegmentTemplate(2, "init_v.webm"+urlQuery, "$Number$_v.webm"+urlQuery, 0, 1) + _, _ = video.SetNewSegmentTemplate(2, "init_v.webm"+urlQueryString, "$Number$_v.webm"+urlQueryString, 0, 1) _, _ = video.AddNewRepresentationVideo(200000, "vp09.00.40.08", "0", framerate, int64(videoWidth), int64(videoHeight)) if ProbeAudioCodec(vf.AudioCodec) != MissingUnsupported { audio, _ := m.AddNewAdaptationSetAudio(MimeWebmAudio, true, 1, "und") - _, _ = audio.SetNewSegmentTemplate(2, "init_a.webm"+urlQuery, "$Number$_a.webm"+urlQuery, 0, 1) + _, _ = audio.SetNewSegmentTemplate(2, "init_a.webm"+urlQueryString, "$Number$_a.webm"+urlQueryString, 0, 1) _, _ = audio.AddNewRepresentationAudio(48000, 96000, "opus", "1") } diff --git a/pkg/ffmpeg/stream_transcode.go b/pkg/ffmpeg/stream_transcode.go index ce56e07956c..e0a30cdd9e5 100644 --- a/pkg/ffmpeg/stream_transcode.go +++ b/pkg/ffmpeg/stream_transcode.go @@ -45,12 +45,31 @@ func CodecInit(codec VideoCodec) (args Args) { "-rc", "vbr", "-cq", "15", ) - case VideoCodecI264: + case VideoCodecN264H: + args = append(args, + "-profile", "p7", + "-tune", "hq", + "-profile", "high", + "-rc", "vbr", + "-rc-lookahead", "60", + "-surfaces", "64", + "-spatial-aq", "1", + "-aq-strength", "15", + "-cq", "15", + "-coder", "cabac", + "-b_ref_mode", "middle", + ) + case VideoCodecI264, VideoCodecIVP9: args = append(args, "-global_quality", "20", "-preset", "faster", ) - case VideoCodecV264: + case VideoCodecI264C: + args = append(args, + "-q", "20", + "-preset", "faster", + ) + case VideoCodecV264, VideoCodecVVP9: args = append(args, "-qp", "20", ) @@ -60,22 
+79,13 @@ func CodecInit(codec VideoCodec) (args Args) { ) case VideoCodecM264: args = append(args, - "-prio_speed", "1", + "-realtime", "1", ) case VideoCodecO264: args = append(args, "-preset", "superfast", "-crf", "25", ) - case VideoCodecIVP9: - args = append(args, - "-global_quality", "20", - "-preset", "faster", - ) - case VideoCodecVVP9: - args = append(args, - "-qp", "20", - ) } return args @@ -198,7 +208,7 @@ func (o TranscodeOptions) makeStreamArgs(sm *StreamManager) Args { videoOnly := ProbeAudioCodec(o.VideoFile.AudioCodec) == MissingUnsupported - videoFilter := sm.encoder.hwMaxResFilter(codec, o.VideoFile.Width, o.VideoFile.Height, maxTranscodeSize, fullhw) + videoFilter := sm.encoder.hwMaxResFilter(codec, o.VideoFile, maxTranscodeSize, fullhw) args = append(args, o.StreamType.Args(codec, videoFilter, videoOnly)...) diff --git a/pkg/ffmpeg/transcoder/screenshot.go b/pkg/ffmpeg/transcoder/screenshot.go index a1ddef6b6ae..c3343d594b9 100644 --- a/pkg/ffmpeg/transcoder/screenshot.go +++ b/pkg/ffmpeg/transcoder/screenshot.go @@ -24,13 +24,13 @@ func (o *ScreenshotOptions) setDefaults() { } type ScreenshotOutputType struct { - codec ffmpeg.VideoCodec + codec *ffmpeg.VideoCodec format ffmpeg.Format } func (t ScreenshotOutputType) Args() []string { var ret []string - if t.codec != "" { + if t.codec != nil { ret = append(ret, t.codec.Args()...) 
} if t.format != "" { @@ -45,7 +45,7 @@ var ( format: "image2", } ScreenshotOutputTypeBMP = ScreenshotOutputType{ - codec: ffmpeg.VideoCodecBMP, + codec: &ffmpeg.VideoCodecBMP, format: "rawvideo", } ) diff --git a/pkg/ffmpeg/transcoder/splice.go b/pkg/ffmpeg/transcoder/splice.go index 7ae7e6c944c..45d71332b8d 100644 --- a/pkg/ffmpeg/transcoder/splice.go +++ b/pkg/ffmpeg/transcoder/splice.go @@ -11,7 +11,7 @@ type SpliceOptions struct { OutputPath string Format ffmpeg.Format - VideoCodec ffmpeg.VideoCodec + VideoCodec *ffmpeg.VideoCodec VideoArgs ffmpeg.Args AudioCodec ffmpeg.AudioCodec @@ -45,11 +45,11 @@ func Splice(concatFile string, options SpliceOptions) ffmpeg.Args { args = args.Overwrite() // if video codec is not provided, then use copy - if options.VideoCodec == "" { - options.VideoCodec = ffmpeg.VideoCodecCopy + if options.VideoCodec == nil { + options.VideoCodec = &ffmpeg.VideoCodecCopy } - args = args.VideoCodec(options.VideoCodec) + args = args.VideoCodec(*options.VideoCodec) args = args.AppendArgs(options.VideoArgs) // if audio codec is not provided, then use copy diff --git a/pkg/ffmpeg/types.go b/pkg/ffmpeg/types.go index c9454cb40b4..c463e7f973a 100644 --- a/pkg/ffmpeg/types.go +++ b/pkg/ffmpeg/types.go @@ -94,4 +94,7 @@ type FFProbeStream struct { MaxBitRate string `json:"max_bit_rate,omitempty"` SampleFmt string `json:"sample_fmt,omitempty"` SampleRate string `json:"sample_rate,omitempty"` + SideDataList []struct { + Rotation int `json:"rotation"` + } `json:"side_data_list"` } diff --git a/pkg/file/file.go b/pkg/file/file.go index 72c7f8a1ab4..407949ba127 100644 --- a/pkg/file/file.go +++ b/pkg/file/file.go @@ -1,3 +1,4 @@ +// Package file provides functionality for managing, scanning and cleaning files and folders. 
package file import ( diff --git a/pkg/file/image/orientation.go b/pkg/file/image/orientation.go new file mode 100644 index 00000000000..84f5774cf66 --- /dev/null +++ b/pkg/file/image/orientation.go @@ -0,0 +1,75 @@ +package image + +import ( + "errors" + "fmt" + "io" + + "github.com/rwcarlsen/goexif/exif" + "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" +) + +func adjustForOrientation(fs models.FS, path string, f *models.ImageFile) { + isFlipped, err := areDimensionsFlipped(fs, path) + if err != nil { + logger.Warnf("Error determining image orientation for %s: %v", path, err) + // isFlipped is false by default + } + + if isFlipped { + f.Width, f.Height = f.Height, f.Width + } +} + +// areDimensionsFlipped returns true if the image dimensions are flipped. +// This is determined by the EXIF orientation tag. +func areDimensionsFlipped(fs models.FS, path string) (bool, error) { + r, err := fs.Open(path) + if err != nil { + return false, fmt.Errorf("reading image file %q: %w", path, err) + } + defer r.Close() + + x, err := exif.Decode(r) + if err != nil { + if errors.Is(err, io.EOF) { + // no exif data + return false, nil + } + + return false, fmt.Errorf("decoding exif data: %w", err) + } + + o, err := x.Get(exif.Orientation) + if err != nil { + // assume not present + return false, nil + } + + oo, err := o.Int(0) + if err != nil { + return false, fmt.Errorf("decoding orientation: %w", err) + } + + return isOrientationDimensionsFlipped(oo), nil +} + +// isOrientationDimensionsFlipped returns true if the image orientation is flipped based on the input orientation EXIF value. +// From https://sirv.com/help/articles/rotate-photos-to-be-upright/ +// 1 = 0 degrees: the correct orientation, no adjustment is required. +// 2 = 0 degrees, mirrored: image has been flipped back-to-front. +// 3 = 180 degrees: image is upside down. +// 4 = 180 degrees, mirrored: image has been flipped back-to-front and is upside down. 
+// 5 = 90 degrees: image has been flipped back-to-front and is on its side. +// 6 = 90 degrees, mirrored: image is on its side. +// 7 = 270 degrees: image has been flipped back-to-front and is on its far side. +// 8 = 270 degrees, mirrored: image is on its far side. +func isOrientationDimensionsFlipped(o int) bool { + switch o { + case 5, 6, 7, 8: + return true + default: + return false + } +} diff --git a/pkg/file/image/scan.go b/pkg/file/image/scan.go index b78de91e0ae..a1d63f649f0 100644 --- a/pkg/file/image/scan.go +++ b/pkg/file/image/scan.go @@ -19,42 +19,23 @@ import ( // Decorator adds image specific fields to a File. type Decorator struct { - FFProbe ffmpeg.FFProbe + FFProbe *ffmpeg.FFProbe } func (d *Decorator) Decorate(ctx context.Context, fs models.FS, f models.File) (models.File, error) { base := f.Base() - decorateFallback := func() (models.File, error) { - r, err := fs.Open(base.Path) - if err != nil { - return f, fmt.Errorf("reading image file %q: %w", base.Path, err) - } - defer r.Close() - - c, format, err := image.DecodeConfig(r) - if err != nil { - return f, fmt.Errorf("decoding image file %q: %w", base.Path, err) - } - return &models.ImageFile{ - BaseFile: base, - Format: format, - Width: c.Width, - Height: c.Height, - }, nil - } - // ignore clips in non-OsFS filesystems as ffprobe cannot read them // TODO - copy to temp file if not an OsFS if _, isOs := fs.(*file.OsFS); !isOs { logger.Debugf("assuming ImageFile for non-OsFS file %q", base.Path) - return decorateFallback() + return decorateFallback(fs, f) } probe, err := d.FFProbe.NewVideoFile(base.Path) if err != nil { logger.Warnf("File %q could not be read with ffprobe: %s, assuming ImageFile", base.Path, err) - return decorateFallback() + return decorateFallback(fs, f) } // Fallback to catch non-animated avif images that FFProbe detects as video files @@ -79,12 +60,53 @@ func (d *Decorator) Decorate(ctx context.Context, fs models.FS, f models.File) ( return videoFileDecorator.Decorate(ctx, 
fs, f) } - return &models.ImageFile{ + ret := &models.ImageFile{ BaseFile: base, Format: probe.VideoCodec, Width: probe.Width, Height: probe.Height, - }, nil + } + + adjustForOrientation(fs, base.Path, ret) + + return ret, nil +} + +func decodeConfig(fs models.FS, path string) (config image.Config, format string, err error) { + r, err := fs.Open(path) + if err != nil { + err = fmt.Errorf("reading image file %q: %w", path, err) + return + } + defer r.Close() + + config, format, err = image.DecodeConfig(r) + if err != nil { + err = fmt.Errorf("decoding image file %q: %w", path, err) + return + } + return +} + +func decorateFallback(fs models.FS, f models.File) (models.File, error) { + base := f.Base() + path := base.Path + + c, format, err := decodeConfig(fs, path) + if err != nil { + return f, err + } + + ret := &models.ImageFile{ + BaseFile: base, + Format: format, + Width: c.Width, + Height: c.Height, + } + + adjustForOrientation(fs, path, ret) + + return ret, nil } func (d *Decorator) IsMissingMetadata(ctx context.Context, fs models.FS, f models.File) bool { diff --git a/pkg/file/import.go b/pkg/file/import.go index 0af94a4d211..7c28197b8e5 100644 --- a/pkg/file/import.go +++ b/pkg/file/import.go @@ -89,7 +89,7 @@ func (i *Importer) fileJSONToFile(ctx context.Context, fileJSON jsonschema.DirEn return i.baseFileJSONToBaseFile(ctx, ff) } - return nil, fmt.Errorf("unknown file type") + return nil, errors.New("unknown file type") } func (i *Importer) baseFileJSONToBaseFile(ctx context.Context, baseJSON *jsonschema.BaseFile) (*models.BaseFile, error) { diff --git a/pkg/file/video/scan.go b/pkg/file/video/scan.go index ca7d0be963a..21be0cd114f 100644 --- a/pkg/file/video/scan.go +++ b/pkg/file/video/scan.go @@ -12,11 +12,11 @@ import ( // Decorator adds video specific fields to a File. 
type Decorator struct { - FFProbe ffmpeg.FFProbe + FFProbe *ffmpeg.FFProbe } func (d *Decorator) Decorate(ctx context.Context, fs models.FS, f models.File) (models.File, error) { - if d.FFProbe == "" { + if d.FFProbe == nil { return f, errors.New("ffprobe not configured") } diff --git a/pkg/fsutil/fs.go b/pkg/fsutil/fs.go index 0b9fc641629..2b5c37f62b8 100644 --- a/pkg/fsutil/fs.go +++ b/pkg/fsutil/fs.go @@ -1,3 +1,4 @@ +// Package fsutil provides filesystem utility functions for the application. package fsutil import ( diff --git a/pkg/gallery/service.go b/pkg/gallery/service.go index 6db604fc4d6..a764e982c60 100644 --- a/pkg/gallery/service.go +++ b/pkg/gallery/service.go @@ -1,3 +1,5 @@ +// Package gallery provides application logic for managing galleries. +// This functionality is exposed via the [Service] type. package gallery import ( diff --git a/pkg/gallery/update.go b/pkg/gallery/update.go index d66da197c81..4f8b1f198eb 100644 --- a/pkg/gallery/update.go +++ b/pkg/gallery/update.go @@ -52,6 +52,22 @@ func (s *Service) RemoveImages(ctx context.Context, g *models.Gallery, toRemove return s.Updated(ctx, g.ID) } +func (s *Service) SetCover(ctx context.Context, g *models.Gallery, coverImageID int) error { + if err := s.Repository.SetCover(ctx, g.ID, coverImageID); err != nil { + return fmt.Errorf("failed to set cover: %w", err) + } + + return s.Updated(ctx, g.ID) +} + +func (s *Service) ResetCover(ctx context.Context, g *models.Gallery) error { + if err := s.Repository.ResetCover(ctx, g.ID); err != nil { + return fmt.Errorf("failed to reset cover: %w", err) + } + + return s.Updated(ctx, g.ID) +} + func AddPerformer(ctx context.Context, qb models.GalleryUpdater, o *models.Gallery, performerID int) error { galleryPartial := models.NewGalleryPartial() galleryPartial.PerformerIDs = &models.UpdateIDs{ diff --git a/pkg/group/create.go b/pkg/group/create.go new file mode 100644 index 00000000000..56d6b7a4ed4 --- /dev/null +++ b/pkg/group/create.go @@ -0,0 +1,41 @@ 
+package group + +import ( + "context" + "errors" + + "github.com/stashapp/stash/pkg/models" +) + +var ( + ErrEmptyName = errors.New("name cannot be empty") + ErrHierarchyLoop = errors.New("a group cannot be contained by one of its subgroups") +) + +func (s *Service) Create(ctx context.Context, group *models.Group, frontimageData []byte, backimageData []byte) error { + r := s.Repository + + if err := s.validateCreate(ctx, group); err != nil { + return err + } + + err := r.Create(ctx, group) + if err != nil { + return err + } + + // update image table + if len(frontimageData) > 0 { + if err := r.UpdateFrontImage(ctx, group.ID, frontimageData); err != nil { + return err + } + } + + if len(backimageData) > 0 { + if err := r.UpdateBackImage(ctx, group.ID, backimageData); err != nil { + return err + } + } + + return nil +} diff --git a/pkg/group/doc.go b/pkg/group/doc.go new file mode 100644 index 00000000000..754f043fc1e --- /dev/null +++ b/pkg/group/doc.go @@ -0,0 +1,2 @@ +// Package group provides the application logic for groups. +package group diff --git a/pkg/movie/export.go b/pkg/group/export.go similarity index 92% rename from pkg/movie/export.go rename to pkg/group/export.go index 5a6c49aa364..418ce7bedd1 100644 --- a/pkg/movie/export.go +++ b/pkg/group/export.go @@ -1,4 +1,4 @@ -package movie +package group import ( "context" @@ -17,13 +17,13 @@ type ImageGetter interface { } // ToJSON converts a Movie into its JSON equivalent. 
-func ToJSON(ctx context.Context, reader ImageGetter, studioReader models.StudioGetter, movie *models.Movie) (*jsonschema.Movie, error) { - newMovieJSON := jsonschema.Movie{ +func ToJSON(ctx context.Context, reader ImageGetter, studioReader models.StudioGetter, movie *models.Group) (*jsonschema.Group, error) { + newMovieJSON := jsonschema.Group{ Name: movie.Name, Aliases: movie.Aliases, Director: movie.Director, Synopsis: movie.Synopsis, - URL: movie.URL, + URLs: movie.URLs.List(), CreatedAt: json.JSONTime{Time: movie.CreatedAt}, UpdatedAt: json.JSONTime{Time: movie.UpdatedAt}, } diff --git a/pkg/movie/export_test.go b/pkg/group/export_test.go similarity index 75% rename from pkg/movie/export_test.go rename to pkg/group/export_test.go index 51d57e2b6e8..5f8d9f7dce6 100644 --- a/pkg/movie/export_test.go +++ b/pkg/group/export_test.go @@ -1,4 +1,4 @@ -package movie +package group import ( "errors" @@ -62,8 +62,8 @@ var ( updateTime = time.Date(2002, 01, 01, 0, 0, 0, 0, time.UTC) ) -func createFullMovie(id int, studioID int) models.Movie { - return models.Movie{ +func createFullMovie(id int, studioID int) models.Group { + return models.Group{ ID: id, Name: movieName, Aliases: movieAliases, @@ -72,23 +72,24 @@ func createFullMovie(id int, studioID int) models.Movie { Duration: &duration, Director: director, Synopsis: synopsis, - URL: url, + URLs: models.NewRelatedStrings([]string{url}), StudioID: &studioID, CreatedAt: createTime, UpdatedAt: updateTime, } } -func createEmptyMovie(id int) models.Movie { - return models.Movie{ +func createEmptyMovie(id int) models.Group { + return models.Group{ ID: id, + URLs: models.NewRelatedStrings([]string{}), CreatedAt: createTime, UpdatedAt: updateTime, } } -func createFullJSONMovie(studio, frontImage, backImage string) *jsonschema.Movie { - return &jsonschema.Movie{ +func createFullJSONMovie(studio, frontImage, backImage string) *jsonschema.Group { + return &jsonschema.Group{ Name: movieName, Aliases: movieAliases, Date: date, @@ 
-96,7 +97,7 @@ func createFullJSONMovie(studio, frontImage, backImage string) *jsonschema.Movie Duration: duration, Director: director, Synopsis: synopsis, - URL: url, + URLs: []string{url}, Studio: studio, FrontImage: frontImage, BackImage: backImage, @@ -109,8 +110,9 @@ func createFullJSONMovie(studio, frontImage, backImage string) *jsonschema.Movie } } -func createEmptyJSONMovie() *jsonschema.Movie { - return &jsonschema.Movie{ +func createEmptyJSONMovie() *jsonschema.Group { + return &jsonschema.Group{ + URLs: []string{}, CreatedAt: json.JSONTime{ Time: createTime, }, @@ -121,8 +123,8 @@ func createEmptyJSONMovie() *jsonschema.Movie { } type testScenario struct { - movie models.Movie - expected *jsonschema.Movie + movie models.Group + expected *jsonschema.Group err bool } @@ -172,18 +174,18 @@ func TestToJSON(t *testing.T) { imageErr := errors.New("error getting image") - db.Movie.On("GetFrontImage", testCtx, movieID).Return(frontImageBytes, nil).Once() - db.Movie.On("GetFrontImage", testCtx, missingStudioMovieID).Return(frontImageBytes, nil).Once() - db.Movie.On("GetFrontImage", testCtx, emptyID).Return(nil, nil).Once().Maybe() - db.Movie.On("GetFrontImage", testCtx, errFrontImageID).Return(nil, imageErr).Once() - db.Movie.On("GetFrontImage", testCtx, errBackImageID).Return(frontImageBytes, nil).Once() + db.Group.On("GetFrontImage", testCtx, movieID).Return(frontImageBytes, nil).Once() + db.Group.On("GetFrontImage", testCtx, missingStudioMovieID).Return(frontImageBytes, nil).Once() + db.Group.On("GetFrontImage", testCtx, emptyID).Return(nil, nil).Once().Maybe() + db.Group.On("GetFrontImage", testCtx, errFrontImageID).Return(nil, imageErr).Once() + db.Group.On("GetFrontImage", testCtx, errBackImageID).Return(frontImageBytes, nil).Once() - db.Movie.On("GetBackImage", testCtx, movieID).Return(backImageBytes, nil).Once() - db.Movie.On("GetBackImage", testCtx, missingStudioMovieID).Return(backImageBytes, nil).Once() - db.Movie.On("GetBackImage", testCtx, 
emptyID).Return(nil, nil).Once() - db.Movie.On("GetBackImage", testCtx, errBackImageID).Return(nil, imageErr).Once() - db.Movie.On("GetBackImage", testCtx, errFrontImageID).Return(backImageBytes, nil).Maybe() - db.Movie.On("GetBackImage", testCtx, errStudioMovieID).Return(backImageBytes, nil).Maybe() + db.Group.On("GetBackImage", testCtx, movieID).Return(backImageBytes, nil).Once() + db.Group.On("GetBackImage", testCtx, missingStudioMovieID).Return(backImageBytes, nil).Once() + db.Group.On("GetBackImage", testCtx, emptyID).Return(nil, nil).Once() + db.Group.On("GetBackImage", testCtx, errBackImageID).Return(nil, imageErr).Once() + db.Group.On("GetBackImage", testCtx, errFrontImageID).Return(backImageBytes, nil).Maybe() + db.Group.On("GetBackImage", testCtx, errStudioMovieID).Return(backImageBytes, nil).Maybe() studioErr := errors.New("error getting studio") @@ -193,7 +195,7 @@ func TestToJSON(t *testing.T) { for i, s := range scenarios { movie := s.movie - json, err := ToJSON(testCtx, db.Movie, db.Studio, &movie) + json, err := ToJSON(testCtx, db.Group, db.Studio, &movie) switch { case !s.err && err != nil: diff --git a/pkg/group/import.go b/pkg/group/import.go new file mode 100644 index 00000000000..589e75df30d --- /dev/null +++ b/pkg/group/import.go @@ -0,0 +1,336 @@ +package group + +import ( + "context" + "fmt" + "strings" + + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/models/jsonschema" + "github.com/stashapp/stash/pkg/sliceutil" + "github.com/stashapp/stash/pkg/utils" +) + +type ImporterReaderWriter interface { + models.GroupCreatorUpdater + FindByName(ctx context.Context, name string, nocase bool) (*models.Group, error) +} + +type SubGroupNotExistError struct { + missingSubGroup string +} + +func (e SubGroupNotExistError) Error() string { + return fmt.Sprintf("sub group <%s> does not exist", e.missingSubGroup) +} + +func (e SubGroupNotExistError) MissingSubGroup() string { + return e.missingSubGroup +} + +type Importer struct { + 
ReaderWriter ImporterReaderWriter + StudioWriter models.StudioFinderCreator + TagWriter models.TagFinderCreator + Input jsonschema.Group + MissingRefBehaviour models.ImportMissingRefEnum + + group models.Group + frontImageData []byte + backImageData []byte +} + +func (i *Importer) PreImport(ctx context.Context) error { + i.group = i.groupJSONToGroup(i.Input) + + if err := i.populateStudio(ctx); err != nil { + return err + } + + if err := i.populateTags(ctx); err != nil { + return err + } + + var err error + if len(i.Input.FrontImage) > 0 { + i.frontImageData, err = utils.ProcessBase64Image(i.Input.FrontImage) + if err != nil { + return fmt.Errorf("invalid front_image: %v", err) + } + } + if len(i.Input.BackImage) > 0 { + i.backImageData, err = utils.ProcessBase64Image(i.Input.BackImage) + if err != nil { + return fmt.Errorf("invalid back_image: %v", err) + } + } + + return nil +} + +func (i *Importer) populateTags(ctx context.Context) error { + if len(i.Input.Tags) > 0 { + + tags, err := importTags(ctx, i.TagWriter, i.Input.Tags, i.MissingRefBehaviour) + if err != nil { + return err + } + + for _, p := range tags { + i.group.TagIDs.Add(p.ID) + } + } + + return nil +} + +func importTags(ctx context.Context, tagWriter models.TagFinderCreator, names []string, missingRefBehaviour models.ImportMissingRefEnum) ([]*models.Tag, error) { + tags, err := tagWriter.FindByNames(ctx, names, false) + if err != nil { + return nil, err + } + + var pluckedNames []string + for _, tag := range tags { + pluckedNames = append(pluckedNames, tag.Name) + } + + missingTags := sliceutil.Filter(names, func(name string) bool { + return !sliceutil.Contains(pluckedNames, name) + }) + + if len(missingTags) > 0 { + if missingRefBehaviour == models.ImportMissingRefEnumFail { + return nil, fmt.Errorf("tags [%s] not found", strings.Join(missingTags, ", ")) + } + + if missingRefBehaviour == models.ImportMissingRefEnumCreate { + createdTags, err := createTags(ctx, tagWriter, missingTags) + if err != 
nil { + return nil, fmt.Errorf("error creating tags: %v", err) + } + + tags = append(tags, createdTags...) + } + + // ignore if MissingRefBehaviour set to Ignore + } + + return tags, nil +} + +func createTags(ctx context.Context, tagWriter models.TagFinderCreator, names []string) ([]*models.Tag, error) { + var ret []*models.Tag + for _, name := range names { + newTag := models.NewTag() + newTag.Name = name + + err := tagWriter.Create(ctx, &newTag) + if err != nil { + return nil, err + } + + ret = append(ret, &newTag) + } + + return ret, nil +} + +func (i *Importer) groupJSONToGroup(groupJSON jsonschema.Group) models.Group { + newGroup := models.Group{ + Name: groupJSON.Name, + Aliases: groupJSON.Aliases, + Director: groupJSON.Director, + Synopsis: groupJSON.Synopsis, + CreatedAt: groupJSON.CreatedAt.GetTime(), + UpdatedAt: groupJSON.UpdatedAt.GetTime(), + + TagIDs: models.NewRelatedIDs([]int{}), + } + + if len(groupJSON.URLs) > 0 { + newGroup.URLs = models.NewRelatedStrings(groupJSON.URLs) + } else if groupJSON.URL != "" { + newGroup.URLs = models.NewRelatedStrings([]string{groupJSON.URL}) + } + if groupJSON.Date != "" { + d, err := models.ParseDate(groupJSON.Date) + if err == nil { + newGroup.Date = &d + } + } + if groupJSON.Rating != 0 { + newGroup.Rating = &groupJSON.Rating + } + + if groupJSON.Duration != 0 { + newGroup.Duration = &groupJSON.Duration + } + + return newGroup +} + +func (i *Importer) populateStudio(ctx context.Context) error { + if i.Input.Studio != "" { + studio, err := i.StudioWriter.FindByName(ctx, i.Input.Studio, false) + if err != nil { + return fmt.Errorf("error finding studio by name: %v", err) + } + + if studio == nil { + if i.MissingRefBehaviour == models.ImportMissingRefEnumFail { + return fmt.Errorf("group studio '%s' not found", i.Input.Studio) + } + + if i.MissingRefBehaviour == models.ImportMissingRefEnumIgnore { + return nil + } + + if i.MissingRefBehaviour == models.ImportMissingRefEnumCreate { + studioID, err := 
i.createStudio(ctx, i.Input.Studio) + if err != nil { + return err + } + i.group.StudioID = &studioID + } + } else { + i.group.StudioID = &studio.ID + } + } + + return nil +} + +func (i *Importer) createStudio(ctx context.Context, name string) (int, error) { + newStudio := models.NewStudio() + newStudio.Name = name + + err := i.StudioWriter.Create(ctx, &newStudio) + if err != nil { + return 0, err + } + + return newStudio.ID, nil +} + +func (i *Importer) PostImport(ctx context.Context, id int) error { + subGroups, err := i.getSubGroups(ctx) + if err != nil { + return err + } + + if len(subGroups) > 0 { + if _, err := i.ReaderWriter.UpdatePartial(ctx, id, models.GroupPartial{ + SubGroups: &models.UpdateGroupDescriptions{ + Groups: subGroups, + Mode: models.RelationshipUpdateModeSet, + }, + }); err != nil { + return fmt.Errorf("error setting parents: %v", err) + } + } + + if len(i.frontImageData) > 0 { + if err := i.ReaderWriter.UpdateFrontImage(ctx, id, i.frontImageData); err != nil { + return fmt.Errorf("error setting group front image: %v", err) + } + } + + if len(i.backImageData) > 0 { + if err := i.ReaderWriter.UpdateBackImage(ctx, id, i.backImageData); err != nil { + return fmt.Errorf("error setting group back image: %v", err) + } + } + + return nil +} + +func (i *Importer) Name() string { + return i.Input.Name +} + +func (i *Importer) FindExistingID(ctx context.Context) (*int, error) { + const nocase = false + existing, err := i.ReaderWriter.FindByName(ctx, i.Name(), nocase) + if err != nil { + return nil, err + } + + if existing != nil { + id := existing.ID + return &id, nil + } + + return nil, nil +} + +func (i *Importer) Create(ctx context.Context) (*int, error) { + err := i.ReaderWriter.Create(ctx, &i.group) + if err != nil { + return nil, fmt.Errorf("error creating group: %v", err) + } + + id := i.group.ID + return &id, nil +} + +func (i *Importer) Update(ctx context.Context, id int) error { + group := i.group + group.ID = id + err := 
i.ReaderWriter.Update(ctx, &group) + if err != nil { + return fmt.Errorf("error updating existing group: %v", err) + } + + return nil +} + +func (i *Importer) getSubGroups(ctx context.Context) ([]models.GroupIDDescription, error) { + var subGroups []models.GroupIDDescription + for _, subGroup := range i.Input.SubGroups { + group, err := i.ReaderWriter.FindByName(ctx, subGroup.Group, false) + if err != nil { + return nil, fmt.Errorf("error finding parent by name: %v", err) + } + + if group == nil { + if i.MissingRefBehaviour == models.ImportMissingRefEnumFail { + return nil, SubGroupNotExistError{missingSubGroup: subGroup.Group} + } + + if i.MissingRefBehaviour == models.ImportMissingRefEnumIgnore { + continue + } + + if i.MissingRefBehaviour == models.ImportMissingRefEnumCreate { + parentID, err := i.createSubGroup(ctx, subGroup.Group) + if err != nil { + return nil, err + } + subGroups = append(subGroups, models.GroupIDDescription{ + GroupID: parentID, + Description: subGroup.Description, + }) + } + } else { + subGroups = append(subGroups, models.GroupIDDescription{ + GroupID: group.ID, + Description: subGroup.Description, + }) + } + } + + return subGroups, nil +} + +func (i *Importer) createSubGroup(ctx context.Context, name string) (int, error) { + newGroup := models.NewGroup() + newGroup.Name = name + + err := i.ReaderWriter.Create(ctx, &newGroup) + if err != nil { + return 0, err + } + + return newGroup.ID, nil +} diff --git a/pkg/movie/import_test.go b/pkg/group/import_test.go similarity index 58% rename from pkg/movie/import_test.go rename to pkg/group/import_test.go index d62f5a89004..c4ca47442a5 100644 --- a/pkg/movie/import_test.go +++ b/pkg/group/import_test.go @@ -1,4 +1,4 @@ -package movie +package group import ( "context" @@ -26,13 +26,20 @@ const ( missingStudioName = "existingStudioName" errImageID = 3 + + existingTagID = 105 + errTagsID = 106 + + existingTagName = "existingTagName" + existingTagErr = "existingTagErr" + missingTagName = 
"missingTagName" ) var testCtx = context.Background() func TestImporterName(t *testing.T) { i := Importer{ - Input: jsonschema.Movie{ + Input: jsonschema.Group{ Name: movieName, }, } @@ -42,7 +49,7 @@ func TestImporterName(t *testing.T) { func TestImporterPreImport(t *testing.T) { i := Importer{ - Input: jsonschema.Movie{ + Input: jsonschema.Group{ Name: movieName, FrontImage: invalidImage, }, @@ -72,9 +79,9 @@ func TestImporterPreImportWithStudio(t *testing.T) { db := mocks.NewDatabase() i := Importer{ - ReaderWriter: db.Movie, + ReaderWriter: db.Group, StudioWriter: db.Studio, - Input: jsonschema.Movie{ + Input: jsonschema.Group{ Name: movieName, FrontImage: frontImage, Studio: existingStudioName, @@ -90,7 +97,7 @@ func TestImporterPreImportWithStudio(t *testing.T) { err := i.PreImport(testCtx) assert.Nil(t, err) - assert.Equal(t, existingStudioID, *i.movie.StudioID) + assert.Equal(t, existingStudioID, *i.group.StudioID) i.Input.Studio = existingStudioErr err = i.PreImport(testCtx) @@ -103,9 +110,9 @@ func TestImporterPreImportWithMissingStudio(t *testing.T) { db := mocks.NewDatabase() i := Importer{ - ReaderWriter: db.Movie, + ReaderWriter: db.Group, StudioWriter: db.Studio, - Input: jsonschema.Movie{ + Input: jsonschema.Group{ Name: movieName, FrontImage: frontImage, Studio: missingStudioName, @@ -129,7 +136,7 @@ func TestImporterPreImportWithMissingStudio(t *testing.T) { i.MissingRefBehaviour = models.ImportMissingRefEnumCreate err = i.PreImport(testCtx) assert.Nil(t, err) - assert.Equal(t, existingStudioID, *i.movie.StudioID) + assert.Equal(t, existingStudioID, *i.group.StudioID) db.AssertExpectations(t) } @@ -138,9 +145,9 @@ func TestImporterPreImportWithMissingStudioCreateErr(t *testing.T) { db := mocks.NewDatabase() i := Importer{ - ReaderWriter: db.Movie, + ReaderWriter: db.Group, StudioWriter: db.Studio, - Input: jsonschema.Movie{ + Input: jsonschema.Group{ Name: movieName, FrontImage: frontImage, Studio: missingStudioName, @@ -157,11 +164,102 @@ func 
TestImporterPreImportWithMissingStudioCreateErr(t *testing.T) { db.AssertExpectations(t) } +func TestImporterPreImportWithTag(t *testing.T) { + db := mocks.NewDatabase() + + i := Importer{ + ReaderWriter: db.Group, + TagWriter: db.Tag, + MissingRefBehaviour: models.ImportMissingRefEnumFail, + Input: jsonschema.Group{ + Tags: []string{ + existingTagName, + }, + }, + } + + db.Tag.On("FindByNames", testCtx, []string{existingTagName}, false).Return([]*models.Tag{ + { + ID: existingTagID, + Name: existingTagName, + }, + }, nil).Once() + db.Tag.On("FindByNames", testCtx, []string{existingTagErr}, false).Return(nil, errors.New("FindByNames error")).Once() + + err := i.PreImport(testCtx) + assert.Nil(t, err) + assert.Equal(t, existingTagID, i.group.TagIDs.List()[0]) + + i.Input.Tags = []string{existingTagErr} + err = i.PreImport(testCtx) + assert.NotNil(t, err) + + db.AssertExpectations(t) +} + +func TestImporterPreImportWithMissingTag(t *testing.T) { + db := mocks.NewDatabase() + + i := Importer{ + ReaderWriter: db.Group, + TagWriter: db.Tag, + Input: jsonschema.Group{ + Tags: []string{ + missingTagName, + }, + }, + MissingRefBehaviour: models.ImportMissingRefEnumFail, + } + + db.Tag.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Times(3) + db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.Tag")).Run(func(args mock.Arguments) { + t := args.Get(1).(*models.Tag) + t.ID = existingTagID + }).Return(nil) + + err := i.PreImport(testCtx) + assert.NotNil(t, err) + + i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore + err = i.PreImport(testCtx) + assert.Nil(t, err) + + i.MissingRefBehaviour = models.ImportMissingRefEnumCreate + err = i.PreImport(testCtx) + assert.Nil(t, err) + assert.Equal(t, existingTagID, i.group.TagIDs.List()[0]) + + db.AssertExpectations(t) +} + +func TestImporterPreImportWithMissingTagCreateErr(t *testing.T) { + db := mocks.NewDatabase() + + i := Importer{ + ReaderWriter: db.Group, + TagWriter: db.Tag, + Input: 
jsonschema.Group{ + Tags: []string{ + missingTagName, + }, + }, + MissingRefBehaviour: models.ImportMissingRefEnumCreate, + } + + db.Tag.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Once() + db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.Tag")).Return(errors.New("Create error")) + + err := i.PreImport(testCtx) + assert.NotNil(t, err) + + db.AssertExpectations(t) +} + func TestImporterPostImport(t *testing.T) { db := mocks.NewDatabase() i := Importer{ - ReaderWriter: db.Movie, + ReaderWriter: db.Group, StudioWriter: db.Studio, frontImageData: frontImageBytes, backImageData: backImageBytes, @@ -169,9 +267,9 @@ func TestImporterPostImport(t *testing.T) { updateMovieImageErr := errors.New("UpdateImages error") - db.Movie.On("UpdateFrontImage", testCtx, movieID, frontImageBytes).Return(nil).Once() - db.Movie.On("UpdateBackImage", testCtx, movieID, backImageBytes).Return(nil).Once() - db.Movie.On("UpdateFrontImage", testCtx, errImageID, frontImageBytes).Return(updateMovieImageErr).Once() + db.Group.On("UpdateFrontImage", testCtx, movieID, frontImageBytes).Return(nil).Once() + db.Group.On("UpdateBackImage", testCtx, movieID, backImageBytes).Return(nil).Once() + db.Group.On("UpdateFrontImage", testCtx, errImageID, frontImageBytes).Return(updateMovieImageErr).Once() err := i.PostImport(testCtx, movieID) assert.Nil(t, err) @@ -186,19 +284,19 @@ func TestImporterFindExistingID(t *testing.T) { db := mocks.NewDatabase() i := Importer{ - ReaderWriter: db.Movie, + ReaderWriter: db.Group, StudioWriter: db.Studio, - Input: jsonschema.Movie{ + Input: jsonschema.Group{ Name: movieName, }, } errFindByName := errors.New("FindByName error") - db.Movie.On("FindByName", testCtx, movieName, false).Return(nil, nil).Once() - db.Movie.On("FindByName", testCtx, existingMovieName, false).Return(&models.Movie{ + db.Group.On("FindByName", testCtx, movieName, false).Return(nil, nil).Once() + db.Group.On("FindByName", testCtx, existingMovieName, 
false).Return(&models.Group{ ID: existingMovieID, }, nil).Once() - db.Movie.On("FindByName", testCtx, movieNameErr, false).Return(nil, errFindByName).Once() + db.Group.On("FindByName", testCtx, movieNameErr, false).Return(nil, errFindByName).Once() id, err := i.FindExistingID(testCtx) assert.Nil(t, id) @@ -220,32 +318,32 @@ func TestImporterFindExistingID(t *testing.T) { func TestCreate(t *testing.T) { db := mocks.NewDatabase() - movie := models.Movie{ + movie := models.Group{ Name: movieName, } - movieErr := models.Movie{ + movieErr := models.Group{ Name: movieNameErr, } i := Importer{ - ReaderWriter: db.Movie, + ReaderWriter: db.Group, StudioWriter: db.Studio, - movie: movie, + group: movie, } errCreate := errors.New("Create error") - db.Movie.On("Create", testCtx, &movie).Run(func(args mock.Arguments) { - m := args.Get(1).(*models.Movie) + db.Group.On("Create", testCtx, &movie).Run(func(args mock.Arguments) { + m := args.Get(1).(*models.Group) m.ID = movieID }).Return(nil).Once() - db.Movie.On("Create", testCtx, &movieErr).Return(errCreate).Once() + db.Group.On("Create", testCtx, &movieErr).Return(errCreate).Once() id, err := i.Create(testCtx) assert.Equal(t, movieID, *id) assert.Nil(t, err) - i.movie = movieErr + i.group = movieErr id, err = i.Create(testCtx) assert.Nil(t, id) assert.NotNil(t, err) @@ -256,34 +354,34 @@ func TestCreate(t *testing.T) { func TestUpdate(t *testing.T) { db := mocks.NewDatabase() - movie := models.Movie{ + movie := models.Group{ Name: movieName, } - movieErr := models.Movie{ + movieErr := models.Group{ Name: movieNameErr, } i := Importer{ - ReaderWriter: db.Movie, + ReaderWriter: db.Group, StudioWriter: db.Studio, - movie: movie, + group: movie, } errUpdate := errors.New("Update error") // id needs to be set for the mock input movie.ID = movieID - db.Movie.On("Update", testCtx, &movie).Return(nil).Once() + db.Group.On("Update", testCtx, &movie).Return(nil).Once() err := i.Update(testCtx, movieID) assert.Nil(t, err) - i.movie = 
movieErr + i.group = movieErr // need to set id separately movieErr.ID = errImageID - db.Movie.On("Update", testCtx, &movieErr).Return(errUpdate).Once() + db.Group.On("Update", testCtx, &movieErr).Return(errUpdate).Once() err = i.Update(testCtx, errImageID) assert.NotNil(t, err) diff --git a/pkg/group/query.go b/pkg/group/query.go new file mode 100644 index 00000000000..b3adafaf523 --- /dev/null +++ b/pkg/group/query.go @@ -0,0 +1,44 @@ +package group + +import ( + "context" + "strconv" + + "github.com/stashapp/stash/pkg/models" +) + +func CountByStudioID(ctx context.Context, r models.GroupQueryer, id int, depth *int) (int, error) { + filter := &models.GroupFilterType{ + Studios: &models.HierarchicalMultiCriterionInput{ + Value: []string{strconv.Itoa(id)}, + Modifier: models.CriterionModifierIncludes, + Depth: depth, + }, + } + + return r.QueryCount(ctx, filter, nil) +} + +func CountByTagID(ctx context.Context, r models.GroupQueryer, id int, depth *int) (int, error) { + filter := &models.GroupFilterType{ + Tags: &models.HierarchicalMultiCriterionInput{ + Value: []string{strconv.Itoa(id)}, + Modifier: models.CriterionModifierIncludes, + Depth: depth, + }, + } + + return r.QueryCount(ctx, filter, nil) +} + +func CountByContainingGroupID(ctx context.Context, r models.GroupQueryer, id int, depth *int) (int, error) { + filter := &models.GroupFilterType{ + ContainingGroups: &models.HierarchicalMultiCriterionInput{ + Value: []string{strconv.Itoa(id)}, + Modifier: models.CriterionModifierIncludes, + Depth: depth, + }, + } + + return r.QueryCount(ctx, filter, nil) +} diff --git a/pkg/group/reorder.go b/pkg/group/reorder.go new file mode 100644 index 00000000000..b4afd1b0968 --- /dev/null +++ b/pkg/group/reorder.go @@ -0,0 +1,33 @@ +package group + +import ( + "context" + "errors" + + "github.com/stashapp/stash/pkg/models" +) + +var ErrInvalidInsertIndex = errors.New("invalid insert index") + +func (s *Service) ReorderSubGroups(ctx context.Context, groupID int, subGroupIDs 
[]int, insertPointID int, insertAfter bool) error { + // get the group + existing, err := s.Repository.Find(ctx, groupID) + if err != nil { + return err + } + + // ensure it exists + if existing == nil { + return models.ErrNotFound + } + + // TODO - ensure the subgroups exist in the group + + // ensure the insert index is valid + if insertPointID < 0 { + return ErrInvalidInsertIndex + } + + // reorder the subgroups + return s.Repository.ReorderSubGroups(ctx, groupID, subGroupIDs, insertPointID, insertAfter) +} diff --git a/pkg/group/service.go b/pkg/group/service.go new file mode 100644 index 00000000000..ff6e0354184 --- /dev/null +++ b/pkg/group/service.go @@ -0,0 +1,46 @@ +package group + +import ( + "context" + + "github.com/stashapp/stash/pkg/models" +) + +type CreatorUpdater interface { + models.GroupGetter + models.GroupCreator + models.GroupUpdater + + models.ContainingGroupLoader + models.SubGroupLoader + + AnscestorFinder + SubGroupIDFinder + SubGroupAdder + SubGroupRemover + SubGroupReorderer +} + +type AnscestorFinder interface { + FindInAncestors(ctx context.Context, ascestorIDs []int, ids []int) ([]int, error) +} + +type SubGroupIDFinder interface { + FindSubGroupIDs(ctx context.Context, containingID int, ids []int) ([]int, error) +} + +type SubGroupAdder interface { + AddSubGroups(ctx context.Context, groupID int, subGroups []models.GroupIDDescription, insertIndex *int) error +} + +type SubGroupRemover interface { + RemoveSubGroups(ctx context.Context, groupID int, subGroupIDs []int) error +} + +type SubGroupReorderer interface { + ReorderSubGroups(ctx context.Context, groupID int, subGroupIDs []int, insertID int, insertAfter bool) error +} + +type Service struct { + Repository CreatorUpdater +} diff --git a/pkg/group/update.go b/pkg/group/update.go new file mode 100644 index 00000000000..d0bc9602add --- /dev/null +++ b/pkg/group/update.go @@ -0,0 +1,112 @@ +package group + +import ( + "context" + "fmt" + + "github.com/stashapp/stash/pkg/models" + 
"github.com/stashapp/stash/pkg/sliceutil" +) + +type SubGroupAlreadyInGroupError struct { + GroupIDs []int +} + +func (e *SubGroupAlreadyInGroupError) Error() string { + return fmt.Sprintf("subgroups with IDs %v already in group", e.GroupIDs) +} + +type ImageInput struct { + Image []byte + Set bool +} + +func (s *Service) UpdatePartial(ctx context.Context, id int, updatedGroup models.GroupPartial, frontImage ImageInput, backImage ImageInput) (*models.Group, error) { + if err := s.validateUpdate(ctx, id, updatedGroup); err != nil { + return nil, err + } + + r := s.Repository + + group, err := r.UpdatePartial(ctx, id, updatedGroup) + if err != nil { + return nil, err + } + + // update image table + if frontImage.Set { + if err := r.UpdateFrontImage(ctx, id, frontImage.Image); err != nil { + return nil, err + } + } + + if backImage.Set { + if err := r.UpdateBackImage(ctx, id, backImage.Image); err != nil { + return nil, err + } + } + + return group, nil +} + +func (s *Service) AddSubGroups(ctx context.Context, groupID int, subGroups []models.GroupIDDescription, insertIndex *int) error { + // get the group + existing, err := s.Repository.Find(ctx, groupID) + if err != nil { + return err + } + + // ensure it exists + if existing == nil { + return models.ErrNotFound + } + + // ensure the subgroups aren't already sub-groups of the group + subGroupIDs := sliceutil.Map(subGroups, func(sg models.GroupIDDescription) int { + return sg.GroupID + }) + + existingSubGroupIDs, err := s.Repository.FindSubGroupIDs(ctx, groupID, subGroupIDs) + if err != nil { + return err + } + + if len(existingSubGroupIDs) > 0 { + return &SubGroupAlreadyInGroupError{ + GroupIDs: existingSubGroupIDs, + } + } + + // validate the hierarchy + d := &models.UpdateGroupDescriptions{ + Groups: subGroups, + Mode: models.RelationshipUpdateModeAdd, + } + if err := s.validateUpdateGroupHierarchy(ctx, existing, nil, d); err != nil { + return err + } + + // validate insert index + if insertIndex != nil && 
*insertIndex < 0 { + return ErrInvalidInsertIndex + } + + // add the subgroups + return s.Repository.AddSubGroups(ctx, groupID, subGroups, insertIndex) +} + +func (s *Service) RemoveSubGroups(ctx context.Context, groupID int, subGroupIDs []int) error { + // get the group + existing, err := s.Repository.Find(ctx, groupID) + if err != nil { + return err + } + + // ensure it exists + if existing == nil { + return models.ErrNotFound + } + + // add the subgroups + return s.Repository.RemoveSubGroups(ctx, groupID, subGroupIDs) +} diff --git a/pkg/group/validate.go b/pkg/group/validate.go new file mode 100644 index 00000000000..723b9f6997a --- /dev/null +++ b/pkg/group/validate.go @@ -0,0 +1,117 @@ +package group + +import ( + "context" + "strings" + + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/sliceutil" +) + +func (s *Service) validateCreate(ctx context.Context, group *models.Group) error { + if err := validateName(group.Name); err != nil { + return err + } + + containingIDs := group.ContainingGroups.IDs() + subIDs := group.SubGroups.IDs() + + if err := s.validateGroupHierarchy(ctx, containingIDs, subIDs); err != nil { + return err + } + + return nil +} + +func (s *Service) validateUpdate(ctx context.Context, id int, partial models.GroupPartial) error { + // get the existing group - ensure it exists + existing, err := s.Repository.Find(ctx, id) + if err != nil { + return err + } + + if existing == nil { + return models.ErrNotFound + } + + if partial.Name.Set { + if err := validateName(partial.Name.Value); err != nil { + return err + } + } + + if err := s.validateUpdateGroupHierarchy(ctx, existing, partial.ContainingGroups, partial.SubGroups); err != nil { + return err + } + + return nil +} + +func validateName(n string) error { + // ensure name is not empty + if strings.TrimSpace(n) == "" { + return ErrEmptyName + } + + return nil +} + +func (s *Service) validateGroupHierarchy(ctx context.Context, containingIDs []int, subIDs []int) error { + 
// only need to validate if both are non-empty + if len(containingIDs) == 0 || len(subIDs) == 0 { + return nil + } + + // ensure none of the containing groups are in the sub groups + found, err := s.Repository.FindInAncestors(ctx, containingIDs, subIDs) + if err != nil { + return err + } + + if len(found) > 0 { + return ErrHierarchyLoop + } + + return nil +} + +func (s *Service) validateUpdateGroupHierarchy(ctx context.Context, existing *models.Group, containingGroups *models.UpdateGroupDescriptions, subGroups *models.UpdateGroupDescriptions) error { + // no need to validate if there are no changes + if containingGroups == nil && subGroups == nil { + return nil + } + + if err := existing.LoadContainingGroupIDs(ctx, s.Repository); err != nil { + return err + } + existingContainingGroups := existing.ContainingGroups.List() + + if err := existing.LoadSubGroupIDs(ctx, s.Repository); err != nil { + return err + } + existingSubGroups := existing.SubGroups.List() + + effectiveContainingGroups := existingContainingGroups + if containingGroups != nil { + effectiveContainingGroups = containingGroups.Apply(existingContainingGroups) + } + + effectiveSubGroups := existingSubGroups + if subGroups != nil { + effectiveSubGroups = subGroups.Apply(existingSubGroups) + } + + containingIDs := idsFromGroupDescriptions(effectiveContainingGroups) + subIDs := idsFromGroupDescriptions(effectiveSubGroups) + + // ensure we haven't set the group as a subgroup of itself + if sliceutil.Contains(containingIDs, existing.ID) || sliceutil.Contains(subIDs, existing.ID) { + return ErrHierarchyLoop + } + + return s.validateGroupHierarchy(ctx, containingIDs, subIDs) +} + +func idsFromGroupDescriptions(v []models.GroupIDDescription) []int { + return sliceutil.Map(v, func(g models.GroupIDDescription) int { return g.GroupID }) +} diff --git a/pkg/image/query.go b/pkg/image/query.go index a5c9a17322f..b9b9e662892 100644 --- a/pkg/image/query.go +++ b/pkg/image/query.go @@ -7,6 +7,19 @@ import ( 
"github.com/stashapp/stash/pkg/models" ) +type Queryer interface { + Query(ctx context.Context, options models.ImageQueryOptions) (*models.ImageQueryResult, error) +} + +type CoverQueryer interface { + Queryer + CoverByGalleryID(ctx context.Context, galleryId int) (*models.Image, error) +} + +type QueryCounter interface { + QueryCount(ctx context.Context, imageFilter *models.ImageFilterType, findFilter *models.FindFilterType) (int, error) +} + // QueryOptions returns a ImageQueryResult populated with the provided filters. func QueryOptions(imageFilter *models.ImageFilterType, findFilter *models.FindFilterType, count bool) models.ImageQueryOptions { return models.ImageQueryOptions{ @@ -19,7 +32,7 @@ func QueryOptions(imageFilter *models.ImageFilterType, findFilter *models.FindFi } // Query queries for images using the provided filters. -func Query(ctx context.Context, qb models.ImageQueryer, imageFilter *models.ImageFilterType, findFilter *models.FindFilterType) ([]*models.Image, error) { +func Query(ctx context.Context, qb Queryer, imageFilter *models.ImageFilterType, findFilter *models.FindFilterType) ([]*models.Image, error) { result, err := qb.Query(ctx, QueryOptions(imageFilter, findFilter, false)) if err != nil { return nil, err @@ -33,7 +46,7 @@ func Query(ctx context.Context, qb models.ImageQueryer, imageFilter *models.Imag return images, nil } -func CountByPerformerID(ctx context.Context, r models.ImageQueryer, id int) (int, error) { +func CountByPerformerID(ctx context.Context, r QueryCounter, id int) (int, error) { filter := &models.ImageFilterType{ Performers: &models.MultiCriterionInput{ Value: []string{strconv.Itoa(id)}, @@ -44,7 +57,7 @@ func CountByPerformerID(ctx context.Context, r models.ImageQueryer, id int) (int return r.QueryCount(ctx, filter, nil) } -func CountByStudioID(ctx context.Context, r models.ImageQueryer, id int, depth *int) (int, error) { +func CountByStudioID(ctx context.Context, r QueryCounter, id int, depth *int) (int, error) { 
filter := &models.ImageFilterType{ Studios: &models.HierarchicalMultiCriterionInput{ Value: []string{strconv.Itoa(id)}, @@ -56,7 +69,7 @@ func CountByStudioID(ctx context.Context, r models.ImageQueryer, id int, depth * return r.QueryCount(ctx, filter, nil) } -func CountByTagID(ctx context.Context, r models.ImageQueryer, id int, depth *int) (int, error) { +func CountByTagID(ctx context.Context, r QueryCounter, id int, depth *int) (int, error) { filter := &models.ImageFilterType{ Tags: &models.HierarchicalMultiCriterionInput{ Value: []string{strconv.Itoa(id)}, @@ -68,7 +81,7 @@ func CountByTagID(ctx context.Context, r models.ImageQueryer, id int, depth *int return r.QueryCount(ctx, filter, nil) } -func FindByGalleryID(ctx context.Context, r models.ImageQueryer, galleryID int, sortBy string, sortDir models.SortDirectionEnum) ([]*models.Image, error) { +func FindByGalleryID(ctx context.Context, r Queryer, galleryID int, sortBy string, sortDir models.SortDirectionEnum) ([]*models.Image, error) { perPage := -1 findFilter := models.FindFilterType{ @@ -91,7 +104,7 @@ func FindByGalleryID(ctx context.Context, r models.ImageQueryer, galleryID int, }, &findFilter) } -func FindGalleryCover(ctx context.Context, r models.ImageQueryer, galleryID int, galleryCoverRegex string) (*models.Image, error) { +func FindGalleryCover(ctx context.Context, r CoverQueryer, galleryID int, galleryCoverRegex string) (*models.Image, error) { const useCoverJpg = true img, err := findGalleryCover(ctx, r, galleryID, useCoverJpg, galleryCoverRegex) if err != nil { @@ -106,7 +119,14 @@ func FindGalleryCover(ctx context.Context, r models.ImageQueryer, galleryID int, return findGalleryCover(ctx, r, galleryID, !useCoverJpg, galleryCoverRegex) } -func findGalleryCover(ctx context.Context, r models.ImageQueryer, galleryID int, useCoverJpg bool, galleryCoverRegex string) (*models.Image, error) { +func findGalleryCover(ctx context.Context, r CoverQueryer, galleryID int, useCoverJpg bool, galleryCoverRegex 
string) (*models.Image, error) { + img, err := r.CoverByGalleryID(ctx, galleryID) + if err != nil { + return nil, err + } else if img != nil { + return img, nil + } + // try to find cover.jpg in the gallery perPage := 1 sortBy := "path" diff --git a/pkg/image/service.go b/pkg/image/service.go index 55dc7686d1c..cb0b24ec320 100644 --- a/pkg/image/service.go +++ b/pkg/image/service.go @@ -1,3 +1,5 @@ +// Package image provides the application logic for images. +// The functionality is exposed via the [Service] type. package image import ( diff --git a/pkg/image/thumbnail.go b/pkg/image/thumbnail.go index 16191fa55bb..c65cfc77eb4 100644 --- a/pkg/image/thumbnail.go +++ b/pkg/image/thumbnail.go @@ -31,7 +31,7 @@ var ( type ThumbnailEncoder struct { FFMpeg *ffmpeg.FFMpeg - FFProbe ffmpeg.FFProbe + FFProbe *ffmpeg.FFProbe ClipPreviewOptions ClipPreviewOptions vips *vipsEncoder } @@ -49,7 +49,7 @@ func GetVipsPath() string { return vipsPath } -func NewThumbnailEncoder(ffmpegEncoder *ffmpeg.FFMpeg, ffProbe ffmpeg.FFProbe, clipPreviewOptions ClipPreviewOptions) ThumbnailEncoder { +func NewThumbnailEncoder(ffmpegEncoder *ffmpeg.FFMpeg, ffProbe *ffmpeg.FFProbe, clipPreviewOptions ClipPreviewOptions) ThumbnailEncoder { ret := ThumbnailEncoder{ FFMpeg: ffmpegEncoder, FFProbe: ffProbe, diff --git a/pkg/javascript/vm.go b/pkg/javascript/vm.go index 556edb1d6cc..2ac4b59dc1f 100644 --- a/pkg/javascript/vm.go +++ b/pkg/javascript/vm.go @@ -1,8 +1,8 @@ +// Package javascript provides the javascript runtime for the application. package javascript import ( "fmt" - "net/http" "os" "reflect" @@ -10,12 +10,9 @@ import ( "github.com/stashapp/stash/pkg/logger" ) +// VM is a wrapper around goja.Runtime. type VM struct { *goja.Runtime - - Progress chan float64 - SessionCookie *http.Cookie - GQLHandler http.Handler } // optionalFieldNameMapper wraps a goja.FieldNameMapper and returns the field name if the wrapped mapper returns an empty string. 
diff --git a/pkg/job/job.go b/pkg/job/job.go index fa1ef3c912b..48b5e7b136e 100644 --- a/pkg/job/job.go +++ b/pkg/job/job.go @@ -1,3 +1,4 @@ +// Package job provides the job execution and management functionality for the application. package job import ( diff --git a/pkg/match/cache.go b/pkg/match/cache.go index 6946f65dbbc..002d67116ca 100644 --- a/pkg/match/cache.go +++ b/pkg/match/cache.go @@ -98,10 +98,12 @@ func getSingleLetterTags(ctx context.Context, c *Cache, reader models.TagAutoTag Value: singleFirstCharacterRegex, Modifier: models.CriterionModifierMatchesRegex, }, - Or: &models.TagFilterType{ - Aliases: &models.StringCriterionInput{ - Value: singleFirstCharacterRegex, - Modifier: models.CriterionModifierMatchesRegex, + OperatorFilter: models.OperatorFilter[models.TagFilterType]{ + Or: &models.TagFilterType{ + Aliases: &models.StringCriterionInput{ + Value: singleFirstCharacterRegex, + Modifier: models.CriterionModifierMatchesRegex, + }, }, }, }, &models.FindFilterType{ diff --git a/pkg/match/path.go b/pkg/match/path.go index 171d9a5308a..1755e701262 100644 --- a/pkg/match/path.go +++ b/pkg/match/path.go @@ -1,3 +1,4 @@ +// Package match provides functions for matching paths to models. 
package match import ( diff --git a/pkg/match/scraped.go b/pkg/match/scraped.go index 0b8a8d69615..637d48bfce7 100644 --- a/pkg/match/scraped.go +++ b/pkg/match/scraped.go @@ -16,8 +16,8 @@ type PerformerFinder interface { FindByStashID(ctx context.Context, stashID models.StashID) ([]*models.Performer, error) } -type MovieNamesFinder interface { - FindByNames(ctx context.Context, names []string, nocase bool) ([]*models.Movie, error) +type GroupNamesFinder interface { + FindByNames(ctx context.Context, names []string, nocase bool) ([]*models.Group, error) } // ScrapedPerformer matches the provided performer with the @@ -118,27 +118,27 @@ func ScrapedStudio(ctx context.Context, qb StudioFinder, s *models.ScrapedStudio return nil } -// ScrapedMovie matches the provided movie with the movies -// in the database and sets the ID field if one is found. -func ScrapedMovie(ctx context.Context, qb MovieNamesFinder, m *models.ScrapedMovie) error { - if m.StoredID != nil || m.Name == nil { - return nil +// ScrapedGroup matches the provided movie with the movies +// in the database and returns the ID field if one is found. +func ScrapedGroup(ctx context.Context, qb GroupNamesFinder, storedID *string, name *string) (matchedID *string, err error) { + if storedID != nil || name == nil { + return } - movies, err := qb.FindByNames(ctx, []string{*m.Name}, true) + movies, err := qb.FindByNames(ctx, []string{*name}, true) if err != nil { - return err + return } if len(movies) != 1 { // ignore - cannot match - return nil + return } id := strconv.Itoa(movies[0].ID) - m.StoredID = &id - return nil + matchedID = &id + return } // ScrapedTag matches the provided tag with the tags diff --git a/pkg/models/doc.go b/pkg/models/doc.go new file mode 100644 index 00000000000..515f5775f64 --- /dev/null +++ b/pkg/models/doc.go @@ -0,0 +1,2 @@ +// Package models provides application models that are used throughout the application. 
+package models diff --git a/pkg/models/filter.go b/pkg/models/filter.go index 1513b0bbea6..577aef42be9 100644 --- a/pkg/models/filter.go +++ b/pkg/models/filter.go @@ -6,6 +6,27 @@ import ( "strconv" ) +type OperatorFilter[T any] struct { + And *T `json:"AND"` + Or *T `json:"OR"` + Not *T `json:"NOT"` +} + +// SubFilter returns the subfilter of the operator filter. +// Only one of And, Or, or Not should be set, so it returns the first of these that are not nil. +func (f *OperatorFilter[T]) SubFilter() *T { + if f.And != nil { + return f.And + } + if f.Or != nil { + return f.Or + } + if f.Not != nil { + return f.Not + } + return nil +} + type CriterionModifier string const ( diff --git a/pkg/models/gallery.go b/pkg/models/gallery.go index 0145ff5e629..73fa287d2e4 100644 --- a/pkg/models/gallery.go +++ b/pkg/models/gallery.go @@ -1,9 +1,7 @@ package models type GalleryFilterType struct { - And *GalleryFilterType `json:"AND"` - Or *GalleryFilterType `json:"OR"` - Not *GalleryFilterType `json:"NOT"` + OperatorFilter[GalleryFilterType] ID *IntCriterionInput `json:"id"` Title *StringCriterionInput `json:"title"` Code *StringCriterionInput `json:"code"` @@ -51,6 +49,16 @@ type GalleryFilterType struct { URL *StringCriterionInput `json:"url"` // Filter by date Date *DateCriterionInput `json:"date"` + // Filter by related scenes that meet this criteria + ScenesFilter *SceneFilterType `json:"scenes_filter"` + // Filter by related images that meet this criteria + ImagesFilter *ImageFilterType `json:"images_filter"` + // Filter by related performers that meet this criteria + PerformersFilter *PerformerFilterType `json:"performers_filter"` + // Filter by related studios that meet this criteria + StudiosFilter *StudioFilterType `json:"studios_filter"` + // Filter by related tags that meet this criteria + TagsFilter *TagFilterType `json:"tags_filter"` // Filter by created at CreatedAt *TimestampCriterionInput `json:"created_at"` // Filter by updated at diff --git 
a/pkg/models/movie.go b/pkg/models/group.go similarity index 51% rename from pkg/models/movie.go rename to pkg/models/group.go index c4afab0e507..6afda3f4890 100644 --- a/pkg/models/movie.go +++ b/pkg/models/group.go @@ -1,6 +1,7 @@ package models -type MovieFilterType struct { +type GroupFilterType struct { + OperatorFilter[GroupFilterType] Name *StringCriterionInput `json:"name"` Director *StringCriterionInput `json:"director"` Synopsis *StringCriterionInput `json:"synopsis"` @@ -16,8 +17,24 @@ type MovieFilterType struct { URL *StringCriterionInput `json:"url"` // Filter to only include movies where performer appears in a scene Performers *MultiCriterionInput `json:"performers"` + // Filter to only include performers with these tags + Tags *HierarchicalMultiCriterionInput `json:"tags"` + // Filter by tag count + TagCount *IntCriterionInput `json:"tag_count"` // Filter by date Date *DateCriterionInput `json:"date"` + // Filter by containing groups + ContainingGroups *HierarchicalMultiCriterionInput `json:"containing_groups"` + // Filter by sub groups + SubGroups *HierarchicalMultiCriterionInput `json:"sub_groups"` + // Filter by number of containing groups the group has + ContainingGroupCount *IntCriterionInput `json:"containing_group_count"` + // Filter by number of sub-groups the group has + SubGroupCount *IntCriterionInput `json:"sub_group_count"` + // Filter by related scenes that meet this criteria + ScenesFilter *SceneFilterType `json:"scenes_filter"` + // Filter by related studios that meet this criteria + StudiosFilter *StudioFilterType `json:"studios_filter"` // Filter by created at CreatedAt *TimestampCriterionInput `json:"created_at"` // Filter by updated at diff --git a/pkg/models/image.go b/pkg/models/image.go index 8dca7399143..6026070fadf 100644 --- a/pkg/models/image.go +++ b/pkg/models/image.go @@ -3,9 +3,7 @@ package models import "context" type ImageFilterType struct { - And *ImageFilterType `json:"AND"` - Or *ImageFilterType `json:"OR"` - Not 
*ImageFilterType `json:"NOT"` + OperatorFilter[ImageFilterType] ID *IntCriterionInput `json:"id"` Title *StringCriterionInput `json:"title"` Code *StringCriterionInput `json:"code"` @@ -51,6 +49,14 @@ type ImageFilterType struct { PerformerAge *IntCriterionInput `json:"performer_age"` // Filter to only include images with these galleries Galleries *MultiCriterionInput `json:"galleries"` + // Filter by related galleries that meet this criteria + GalleriesFilter *GalleryFilterType `json:"galleries_filter"` + // Filter by related performers that meet this criteria + PerformersFilter *PerformerFilterType `json:"performers_filter"` + // Filter by related studios that meet this criteria + StudiosFilter *StudioFilterType `json:"studios_filter"` + // Filter by related tags that meet this criteria + TagsFilter *TagFilterType `json:"tags_filter"` // Filter by created at CreatedAt *TimestampCriterionInput `json:"created_at"` // Filter by updated at diff --git a/pkg/models/json/json_time.go b/pkg/models/json/json_time.go index 134bc69c6fb..20ef9b4426f 100644 --- a/pkg/models/json/json_time.go +++ b/pkg/models/json/json_time.go @@ -1,3 +1,4 @@ +// Package json provides generic JSON types. package json import ( diff --git a/pkg/models/jsonschema/doc.go b/pkg/models/jsonschema/doc.go new file mode 100644 index 00000000000..d19852ac10d --- /dev/null +++ b/pkg/models/jsonschema/doc.go @@ -0,0 +1,2 @@ +// Package jsonschema provides the JSON schema models used for importing and exporting data. 
+package jsonschema diff --git a/pkg/models/jsonschema/group.go b/pkg/models/jsonschema/group.go new file mode 100644 index 00000000000..b284dab6e77 --- /dev/null +++ b/pkg/models/jsonschema/group.go @@ -0,0 +1,86 @@ +package jsonschema + +import ( + "fmt" + "os" + + jsoniter "github.com/json-iterator/go" + + "github.com/stashapp/stash/pkg/fsutil" + "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models/json" +) + +type SubGroupDescription struct { + Group string `json:"name,omitempty"` + Description string `json:"description,omitempty"` +} + +type Group struct { + Name string `json:"name,omitempty"` + Aliases string `json:"aliases,omitempty"` + Duration int `json:"duration,omitempty"` + Date string `json:"date,omitempty"` + Rating int `json:"rating,omitempty"` + Director string `json:"director,omitempty"` + Synopsis string `json:"synopsis,omitempty"` + FrontImage string `json:"front_image,omitempty"` + BackImage string `json:"back_image,omitempty"` + URLs []string `json:"urls,omitempty"` + Studio string `json:"studio,omitempty"` + Tags []string `json:"tags,omitempty"` + SubGroups []SubGroupDescription `json:"sub_groups,omitempty"` + CreatedAt json.JSONTime `json:"created_at,omitempty"` + UpdatedAt json.JSONTime `json:"updated_at,omitempty"` + + // deprecated - for import only + URL string `json:"url,omitempty"` +} + +func (s Group) Filename() string { + return fsutil.SanitiseBasename(s.Name) + ".json" +} + +// Backwards Compatible synopsis for the movie +type MovieSynopsisBC struct { + Synopsis string `json:"sypnopsis,omitempty"` +} + +func LoadGroupFile(filePath string) (*Group, error) { + var movie Group + file, err := os.Open(filePath) + if err != nil { + return nil, err + } + defer file.Close() + var json = jsoniter.ConfigCompatibleWithStandardLibrary + jsonParser := json.NewDecoder(file) + err = jsonParser.Decode(&movie) + if err != nil { + return nil, err + } + if movie.Synopsis == "" { + // keep backwards compatibility with pre #2664 
builds + // attempt to get the synopsis from the alternate (sypnopsis) key + + _, err = file.Seek(0, 0) // seek to start of file + if err == nil { + var synopsis MovieSynopsisBC + err = jsonParser.Decode(&synopsis) + if err == nil { + movie.Synopsis = synopsis.Synopsis + if movie.Synopsis != "" { + logger.Debug("Movie synopsis retrieved from alternate key") + } + } + } + } + return &movie, nil +} + +func SaveGroupFile(filePath string, movie *Group) error { + if movie == nil { + return fmt.Errorf("movie must not be nil") + } + return marshalToFile(filePath, movie) +} diff --git a/pkg/models/jsonschema/movie.go b/pkg/models/jsonschema/movie.go deleted file mode 100644 index d787f8288af..00000000000 --- a/pkg/models/jsonschema/movie.go +++ /dev/null @@ -1,76 +0,0 @@ -package jsonschema - -import ( - "fmt" - "os" - - jsoniter "github.com/json-iterator/go" - - "github.com/stashapp/stash/pkg/fsutil" - "github.com/stashapp/stash/pkg/logger" - "github.com/stashapp/stash/pkg/models/json" -) - -type Movie struct { - Name string `json:"name,omitempty"` - Aliases string `json:"aliases,omitempty"` - Duration int `json:"duration,omitempty"` - Date string `json:"date,omitempty"` - Rating int `json:"rating,omitempty"` - Director string `json:"director,omitempty"` - Synopsis string `json:"synopsis,omitempty"` - FrontImage string `json:"front_image,omitempty"` - BackImage string `json:"back_image,omitempty"` - URL string `json:"url,omitempty"` - Studio string `json:"studio,omitempty"` - CreatedAt json.JSONTime `json:"created_at,omitempty"` - UpdatedAt json.JSONTime `json:"updated_at,omitempty"` -} - -func (s Movie) Filename() string { - return fsutil.SanitiseBasename(s.Name) + ".json" -} - -// Backwards Compatible synopsis for the movie -type MovieSynopsisBC struct { - Synopsis string `json:"sypnopsis,omitempty"` -} - -func LoadMovieFile(filePath string) (*Movie, error) { - var movie Movie - file, err := os.Open(filePath) - if err != nil { - return nil, err - } - defer file.Close() 
- var json = jsoniter.ConfigCompatibleWithStandardLibrary - jsonParser := json.NewDecoder(file) - err = jsonParser.Decode(&movie) - if err != nil { - return nil, err - } - if movie.Synopsis == "" { - // keep backwards compatibility with pre #2664 builds - // attempt to get the synopsis from the alternate (sypnopsis) key - - _, err = file.Seek(0, 0) // seek to start of file - if err == nil { - var synopsis MovieSynopsisBC - err = jsonParser.Decode(&synopsis) - if err == nil { - movie.Synopsis = synopsis.Synopsis - if movie.Synopsis != "" { - logger.Debug("Movie synopsis retrieved from alternate key") - } - } - } - } - return &movie, nil -} - -func SaveMovieFile(filePath string, movie *Movie) error { - if movie == nil { - return fmt.Errorf("movie must not be nil") - } - return marshalToFile(filePath, movie) -} diff --git a/pkg/models/jsonschema/performer.go b/pkg/models/jsonschema/performer.go index 248cf955736..7ffa69983b4 100644 --- a/pkg/models/jsonschema/performer.go +++ b/pkg/models/jsonschema/performer.go @@ -34,16 +34,14 @@ func (s *StringOrStringList) UnmarshalJSON(data []byte) error { } type Performer struct { - Name string `json:"name,omitempty"` - Disambiguation string `json:"disambiguation,omitempty"` - Gender string `json:"gender,omitempty"` - URL string `json:"url,omitempty"` - Twitter string `json:"twitter,omitempty"` - Instagram string `json:"instagram,omitempty"` - Birthdate string `json:"birthdate,omitempty"` - Ethnicity string `json:"ethnicity,omitempty"` - Country string `json:"country,omitempty"` - EyeColor string `json:"eye_color,omitempty"` + Name string `json:"name,omitempty"` + Disambiguation string `json:"disambiguation,omitempty"` + Gender string `json:"gender,omitempty"` + URLs []string `json:"urls,omitempty"` + Birthdate string `json:"birthdate,omitempty"` + Ethnicity string `json:"ethnicity,omitempty"` + Country string `json:"country,omitempty"` + EyeColor string `json:"eye_color,omitempty"` // this should be int, but keeping string for 
backwards compatibility Height string `json:"height,omitempty"` Measurements string `json:"measurements,omitempty"` @@ -66,6 +64,11 @@ type Performer struct { Weight int `json:"weight,omitempty"` StashIDs []models.StashID `json:"stash_ids,omitempty"` IgnoreAutoTag bool `json:"ignore_auto_tag,omitempty"` + + // deprecated - for import only + URL string `json:"url,omitempty"` + Twitter string `json:"twitter,omitempty"` + Instagram string `json:"instagram,omitempty"` } func (s Performer) Filename() string { diff --git a/pkg/models/jsonschema/scene.go b/pkg/models/jsonschema/scene.go index 9c59610ab79..757f520b869 100644 --- a/pkg/models/jsonschema/scene.go +++ b/pkg/models/jsonschema/scene.go @@ -33,8 +33,8 @@ type SceneFile struct { Bitrate int `json:"bitrate"` } -type SceneMovie struct { - MovieName string `json:"movieName,omitempty"` +type SceneGroup struct { + GroupName string `json:"movieName,omitempty"` SceneIndex int `json:"scene_index,omitempty"` } @@ -58,7 +58,7 @@ type Scene struct { Director string `json:"director,omitempty"` Galleries []GalleryRef `json:"galleries,omitempty"` Performers []string `json:"performers,omitempty"` - Movies []SceneMovie `json:"movies,omitempty"` + Groups []SceneGroup `json:"movies,omitempty"` Tags []string `json:"tags,omitempty"` Markers []SceneMarker `json:"markers,omitempty"` Files []string `json:"files,omitempty"` diff --git a/pkg/models/jsonschema/studio.go b/pkg/models/jsonschema/studio.go index 84842fa14e3..80ed97d9294 100644 --- a/pkg/models/jsonschema/studio.go +++ b/pkg/models/jsonschema/studio.go @@ -22,6 +22,7 @@ type Studio struct { Details string `json:"details,omitempty"` Aliases []string `json:"aliases,omitempty"` StashIDs []models.StashID `json:"stash_ids,omitempty"` + Tags []string `json:"tags,omitempty"` IgnoreAutoTag bool `json:"ignore_auto_tag,omitempty"` } diff --git a/pkg/models/mocks/GalleryReaderWriter.go b/pkg/models/mocks/GalleryReaderWriter.go index bd1fbf0d2b9..f07f8a7d902 100644 --- 
a/pkg/models/mocks/GalleryReaderWriter.go +++ b/pkg/models/mocks/GalleryReaderWriter.go @@ -628,6 +628,34 @@ func (_m *GalleryReaderWriter) RemoveImages(ctx context.Context, galleryID int, return r0 } +// ResetCover provides a mock function with given fields: ctx, galleryID +func (_m *GalleryReaderWriter) ResetCover(ctx context.Context, galleryID int) error { + ret := _m.Called(ctx, galleryID) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int) error); ok { + r0 = rf(ctx, galleryID) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// SetCover provides a mock function with given fields: ctx, galleryID, coverImageID +func (_m *GalleryReaderWriter) SetCover(ctx context.Context, galleryID int, coverImageID int) error { + ret := _m.Called(ctx, galleryID, coverImageID) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int, int) error); ok { + r0 = rf(ctx, galleryID, coverImageID) + } else { + r0 = ret.Error(0) + } + + return r0 +} + // Update provides a mock function with given fields: ctx, updatedGallery func (_m *GalleryReaderWriter) Update(ctx context.Context, updatedGallery *models.Gallery) error { ret := _m.Called(ctx, updatedGallery) diff --git a/pkg/models/mocks/GroupReaderWriter.go b/pkg/models/mocks/GroupReaderWriter.go new file mode 100644 index 00000000000..dc745d09487 --- /dev/null +++ b/pkg/models/mocks/GroupReaderWriter.go @@ -0,0 +1,563 @@ +// Code generated by mockery v2.10.0. DO NOT EDIT. 
+ +package mocks + +import ( + context "context" + + models "github.com/stashapp/stash/pkg/models" + mock "github.com/stretchr/testify/mock" +) + +// GroupReaderWriter is an autogenerated mock type for the GroupReaderWriter type +type GroupReaderWriter struct { + mock.Mock +} + +// All provides a mock function with given fields: ctx +func (_m *GroupReaderWriter) All(ctx context.Context) ([]*models.Group, error) { + ret := _m.Called(ctx) + + var r0 []*models.Group + if rf, ok := ret.Get(0).(func(context.Context) []*models.Group); ok { + r0 = rf(ctx) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Group) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context) error); ok { + r1 = rf(ctx) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Count provides a mock function with given fields: ctx +func (_m *GroupReaderWriter) Count(ctx context.Context) (int, error) { + ret := _m.Called(ctx) + + var r0 int + if rf, ok := ret.Get(0).(func(context.Context) int); ok { + r0 = rf(ctx) + } else { + r0 = ret.Get(0).(int) + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context) error); ok { + r1 = rf(ctx) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// CountByPerformerID provides a mock function with given fields: ctx, performerID +func (_m *GroupReaderWriter) CountByPerformerID(ctx context.Context, performerID int) (int, error) { + ret := _m.Called(ctx, performerID) + + var r0 int + if rf, ok := ret.Get(0).(func(context.Context, int) int); ok { + r0 = rf(ctx, performerID) + } else { + r0 = ret.Get(0).(int) + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, performerID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// CountByStudioID provides a mock function with given fields: ctx, studioID +func (_m *GroupReaderWriter) CountByStudioID(ctx context.Context, studioID int) (int, error) { + ret := _m.Called(ctx, studioID) + + var r0 int + if rf, ok := 
ret.Get(0).(func(context.Context, int) int); ok { + r0 = rf(ctx, studioID) + } else { + r0 = ret.Get(0).(int) + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, studioID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Create provides a mock function with given fields: ctx, newGroup +func (_m *GroupReaderWriter) Create(ctx context.Context, newGroup *models.Group) error { + ret := _m.Called(ctx, newGroup) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *models.Group) error); ok { + r0 = rf(ctx, newGroup) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Destroy provides a mock function with given fields: ctx, id +func (_m *GroupReaderWriter) Destroy(ctx context.Context, id int) error { + ret := _m.Called(ctx, id) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int) error); ok { + r0 = rf(ctx, id) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Find provides a mock function with given fields: ctx, id +func (_m *GroupReaderWriter) Find(ctx context.Context, id int) (*models.Group, error) { + ret := _m.Called(ctx, id) + + var r0 *models.Group + if rf, ok := ret.Get(0).(func(context.Context, int) *models.Group); ok { + r0 = rf(ctx, id) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.Group) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, id) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByName provides a mock function with given fields: ctx, name, nocase +func (_m *GroupReaderWriter) FindByName(ctx context.Context, name string, nocase bool) (*models.Group, error) { + ret := _m.Called(ctx, name, nocase) + + var r0 *models.Group + if rf, ok := ret.Get(0).(func(context.Context, string, bool) *models.Group); ok { + r0 = rf(ctx, name, nocase) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.Group) + } + } + + var r1 error + if rf, ok := 
ret.Get(1).(func(context.Context, string, bool) error); ok { + r1 = rf(ctx, name, nocase) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByNames provides a mock function with given fields: ctx, names, nocase +func (_m *GroupReaderWriter) FindByNames(ctx context.Context, names []string, nocase bool) ([]*models.Group, error) { + ret := _m.Called(ctx, names, nocase) + + var r0 []*models.Group + if rf, ok := ret.Get(0).(func(context.Context, []string, bool) []*models.Group); ok { + r0 = rf(ctx, names, nocase) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Group) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []string, bool) error); ok { + r1 = rf(ctx, names, nocase) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByPerformerID provides a mock function with given fields: ctx, performerID +func (_m *GroupReaderWriter) FindByPerformerID(ctx context.Context, performerID int) ([]*models.Group, error) { + ret := _m.Called(ctx, performerID) + + var r0 []*models.Group + if rf, ok := ret.Get(0).(func(context.Context, int) []*models.Group); ok { + r0 = rf(ctx, performerID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Group) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, performerID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByStudioID provides a mock function with given fields: ctx, studioID +func (_m *GroupReaderWriter) FindByStudioID(ctx context.Context, studioID int) ([]*models.Group, error) { + ret := _m.Called(ctx, studioID) + + var r0 []*models.Group + if rf, ok := ret.Get(0).(func(context.Context, int) []*models.Group); ok { + r0 = rf(ctx, studioID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Group) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, studioID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} 
+ +// FindMany provides a mock function with given fields: ctx, ids +func (_m *GroupReaderWriter) FindMany(ctx context.Context, ids []int) ([]*models.Group, error) { + ret := _m.Called(ctx, ids) + + var r0 []*models.Group + if rf, ok := ret.Get(0).(func(context.Context, []int) []*models.Group); ok { + r0 = rf(ctx, ids) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Group) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []int) error); ok { + r1 = rf(ctx, ids) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetBackImage provides a mock function with given fields: ctx, groupID +func (_m *GroupReaderWriter) GetBackImage(ctx context.Context, groupID int) ([]byte, error) { + ret := _m.Called(ctx, groupID) + + var r0 []byte + if rf, ok := ret.Get(0).(func(context.Context, int) []byte); ok { + r0 = rf(ctx, groupID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]byte) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, groupID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetContainingGroupDescriptions provides a mock function with given fields: ctx, id +func (_m *GroupReaderWriter) GetContainingGroupDescriptions(ctx context.Context, id int) ([]models.GroupIDDescription, error) { + ret := _m.Called(ctx, id) + + var r0 []models.GroupIDDescription + if rf, ok := ret.Get(0).(func(context.Context, int) []models.GroupIDDescription); ok { + r0 = rf(ctx, id) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.GroupIDDescription) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, id) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetFrontImage provides a mock function with given fields: ctx, groupID +func (_m *GroupReaderWriter) GetFrontImage(ctx context.Context, groupID int) ([]byte, error) { + ret := _m.Called(ctx, groupID) + + var r0 []byte + if rf, ok := 
ret.Get(0).(func(context.Context, int) []byte); ok { + r0 = rf(ctx, groupID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]byte) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, groupID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetSubGroupDescriptions provides a mock function with given fields: ctx, id +func (_m *GroupReaderWriter) GetSubGroupDescriptions(ctx context.Context, id int) ([]models.GroupIDDescription, error) { + ret := _m.Called(ctx, id) + + var r0 []models.GroupIDDescription + if rf, ok := ret.Get(0).(func(context.Context, int) []models.GroupIDDescription); ok { + r0 = rf(ctx, id) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.GroupIDDescription) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, id) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetTagIDs provides a mock function with given fields: ctx, relatedID +func (_m *GroupReaderWriter) GetTagIDs(ctx context.Context, relatedID int) ([]int, error) { + ret := _m.Called(ctx, relatedID) + + var r0 []int + if rf, ok := ret.Get(0).(func(context.Context, int) []int); ok { + r0 = rf(ctx, relatedID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]int) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, relatedID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetURLs provides a mock function with given fields: ctx, relatedID +func (_m *GroupReaderWriter) GetURLs(ctx context.Context, relatedID int) ([]string, error) { + ret := _m.Called(ctx, relatedID) + + var r0 []string + if rf, ok := ret.Get(0).(func(context.Context, int) []string); ok { + r0 = rf(ctx, relatedID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]string) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, relatedID) + } 
else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// HasBackImage provides a mock function with given fields: ctx, groupID +func (_m *GroupReaderWriter) HasBackImage(ctx context.Context, groupID int) (bool, error) { + ret := _m.Called(ctx, groupID) + + var r0 bool + if rf, ok := ret.Get(0).(func(context.Context, int) bool); ok { + r0 = rf(ctx, groupID) + } else { + r0 = ret.Get(0).(bool) + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, groupID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// HasFrontImage provides a mock function with given fields: ctx, groupID +func (_m *GroupReaderWriter) HasFrontImage(ctx context.Context, groupID int) (bool, error) { + ret := _m.Called(ctx, groupID) + + var r0 bool + if rf, ok := ret.Get(0).(func(context.Context, int) bool); ok { + r0 = rf(ctx, groupID) + } else { + r0 = ret.Get(0).(bool) + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, groupID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Query provides a mock function with given fields: ctx, groupFilter, findFilter +func (_m *GroupReaderWriter) Query(ctx context.Context, groupFilter *models.GroupFilterType, findFilter *models.FindFilterType) ([]*models.Group, int, error) { + ret := _m.Called(ctx, groupFilter, findFilter) + + var r0 []*models.Group + if rf, ok := ret.Get(0).(func(context.Context, *models.GroupFilterType, *models.FindFilterType) []*models.Group); ok { + r0 = rf(ctx, groupFilter, findFilter) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Group) + } + } + + var r1 int + if rf, ok := ret.Get(1).(func(context.Context, *models.GroupFilterType, *models.FindFilterType) int); ok { + r1 = rf(ctx, groupFilter, findFilter) + } else { + r1 = ret.Get(1).(int) + } + + var r2 error + if rf, ok := ret.Get(2).(func(context.Context, *models.GroupFilterType, *models.FindFilterType) error); ok { + r2 = rf(ctx, groupFilter, 
findFilter) + } else { + r2 = ret.Error(2) + } + + return r0, r1, r2 +} + +// QueryCount provides a mock function with given fields: ctx, groupFilter, findFilter +func (_m *GroupReaderWriter) QueryCount(ctx context.Context, groupFilter *models.GroupFilterType, findFilter *models.FindFilterType) (int, error) { + ret := _m.Called(ctx, groupFilter, findFilter) + + var r0 int + if rf, ok := ret.Get(0).(func(context.Context, *models.GroupFilterType, *models.FindFilterType) int); ok { + r0 = rf(ctx, groupFilter, findFilter) + } else { + r0 = ret.Get(0).(int) + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *models.GroupFilterType, *models.FindFilterType) error); ok { + r1 = rf(ctx, groupFilter, findFilter) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Update provides a mock function with given fields: ctx, updatedGroup +func (_m *GroupReaderWriter) Update(ctx context.Context, updatedGroup *models.Group) error { + ret := _m.Called(ctx, updatedGroup) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *models.Group) error); ok { + r0 = rf(ctx, updatedGroup) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// UpdateBackImage provides a mock function with given fields: ctx, groupID, backImage +func (_m *GroupReaderWriter) UpdateBackImage(ctx context.Context, groupID int, backImage []byte) error { + ret := _m.Called(ctx, groupID, backImage) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int, []byte) error); ok { + r0 = rf(ctx, groupID, backImage) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// UpdateFrontImage provides a mock function with given fields: ctx, groupID, frontImage +func (_m *GroupReaderWriter) UpdateFrontImage(ctx context.Context, groupID int, frontImage []byte) error { + ret := _m.Called(ctx, groupID, frontImage) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int, []byte) error); ok { + r0 = rf(ctx, groupID, frontImage) + } else { + r0 = ret.Error(0) + } + + 
return r0 +} + +// UpdatePartial provides a mock function with given fields: ctx, id, updatedGroup +func (_m *GroupReaderWriter) UpdatePartial(ctx context.Context, id int, updatedGroup models.GroupPartial) (*models.Group, error) { + ret := _m.Called(ctx, id, updatedGroup) + + var r0 *models.Group + if rf, ok := ret.Get(0).(func(context.Context, int, models.GroupPartial) *models.Group); ok { + r0 = rf(ctx, id, updatedGroup) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.Group) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int, models.GroupPartial) error); ok { + r1 = rf(ctx, id, updatedGroup) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} diff --git a/pkg/models/mocks/ImageReaderWriter.go b/pkg/models/mocks/ImageReaderWriter.go index 5a525857bfe..04fd6690004 100644 --- a/pkg/models/mocks/ImageReaderWriter.go +++ b/pkg/models/mocks/ImageReaderWriter.go @@ -114,6 +114,29 @@ func (_m *ImageReaderWriter) CountByGalleryID(ctx context.Context, galleryID int return r0, r1 } +// CoverByGalleryID provides a mock function with given fields: ctx, galleryId +func (_m *ImageReaderWriter) CoverByGalleryID(ctx context.Context, galleryId int) (*models.Image, error) { + ret := _m.Called(ctx, galleryId) + + var r0 *models.Image + if rf, ok := ret.Get(0).(func(context.Context, int) *models.Image); ok { + r0 = rf(ctx, galleryId) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.Image) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, galleryId) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // Create provides a mock function with given fields: ctx, newImage, fileIDs func (_m *ImageReaderWriter) Create(ctx context.Context, newImage *models.Image, fileIDs []models.FileID) error { ret := _m.Called(ctx, newImage, fileIDs) @@ -301,6 +324,29 @@ func (_m *ImageReaderWriter) FindByGalleryID(ctx context.Context, galleryID int) return r0, r1 } +// 
FindByGalleryIDIndex provides a mock function with given fields: ctx, galleryID, index +func (_m *ImageReaderWriter) FindByGalleryIDIndex(ctx context.Context, galleryID int, index uint) (*models.Image, error) { + ret := _m.Called(ctx, galleryID, index) + + var r0 *models.Image + if rf, ok := ret.Get(0).(func(context.Context, int, uint) *models.Image); ok { + r0 = rf(ctx, galleryID, index) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.Image) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int, uint) error); ok { + r1 = rf(ctx, galleryID, index) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // FindByZipFileID provides a mock function with given fields: ctx, zipFileID func (_m *ImageReaderWriter) FindByZipFileID(ctx context.Context, zipFileID models.FileID) ([]*models.Image, error) { ret := _m.Called(ctx, zipFileID) diff --git a/pkg/models/mocks/MovieReaderWriter.go b/pkg/models/mocks/MovieReaderWriter.go deleted file mode 100644 index edf355e142c..00000000000 --- a/pkg/models/mocks/MovieReaderWriter.go +++ /dev/null @@ -1,471 +0,0 @@ -// Code generated by mockery v2.10.0. DO NOT EDIT. 
- -package mocks - -import ( - context "context" - - models "github.com/stashapp/stash/pkg/models" - mock "github.com/stretchr/testify/mock" -) - -// MovieReaderWriter is an autogenerated mock type for the MovieReaderWriter type -type MovieReaderWriter struct { - mock.Mock -} - -// All provides a mock function with given fields: ctx -func (_m *MovieReaderWriter) All(ctx context.Context) ([]*models.Movie, error) { - ret := _m.Called(ctx) - - var r0 []*models.Movie - if rf, ok := ret.Get(0).(func(context.Context) []*models.Movie); ok { - r0 = rf(ctx) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]*models.Movie) - } - } - - var r1 error - if rf, ok := ret.Get(1).(func(context.Context) error); ok { - r1 = rf(ctx) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// Count provides a mock function with given fields: ctx -func (_m *MovieReaderWriter) Count(ctx context.Context) (int, error) { - ret := _m.Called(ctx) - - var r0 int - if rf, ok := ret.Get(0).(func(context.Context) int); ok { - r0 = rf(ctx) - } else { - r0 = ret.Get(0).(int) - } - - var r1 error - if rf, ok := ret.Get(1).(func(context.Context) error); ok { - r1 = rf(ctx) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// CountByPerformerID provides a mock function with given fields: ctx, performerID -func (_m *MovieReaderWriter) CountByPerformerID(ctx context.Context, performerID int) (int, error) { - ret := _m.Called(ctx, performerID) - - var r0 int - if rf, ok := ret.Get(0).(func(context.Context, int) int); ok { - r0 = rf(ctx, performerID) - } else { - r0 = ret.Get(0).(int) - } - - var r1 error - if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { - r1 = rf(ctx, performerID) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// CountByStudioID provides a mock function with given fields: ctx, studioID -func (_m *MovieReaderWriter) CountByStudioID(ctx context.Context, studioID int) (int, error) { - ret := _m.Called(ctx, studioID) - - var r0 int - if rf, ok := 
ret.Get(0).(func(context.Context, int) int); ok { - r0 = rf(ctx, studioID) - } else { - r0 = ret.Get(0).(int) - } - - var r1 error - if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { - r1 = rf(ctx, studioID) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// Create provides a mock function with given fields: ctx, newMovie -func (_m *MovieReaderWriter) Create(ctx context.Context, newMovie *models.Movie) error { - ret := _m.Called(ctx, newMovie) - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *models.Movie) error); ok { - r0 = rf(ctx, newMovie) - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// Destroy provides a mock function with given fields: ctx, id -func (_m *MovieReaderWriter) Destroy(ctx context.Context, id int) error { - ret := _m.Called(ctx, id) - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, int) error); ok { - r0 = rf(ctx, id) - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// Find provides a mock function with given fields: ctx, id -func (_m *MovieReaderWriter) Find(ctx context.Context, id int) (*models.Movie, error) { - ret := _m.Called(ctx, id) - - var r0 *models.Movie - if rf, ok := ret.Get(0).(func(context.Context, int) *models.Movie); ok { - r0 = rf(ctx, id) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*models.Movie) - } - } - - var r1 error - if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { - r1 = rf(ctx, id) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// FindByName provides a mock function with given fields: ctx, name, nocase -func (_m *MovieReaderWriter) FindByName(ctx context.Context, name string, nocase bool) (*models.Movie, error) { - ret := _m.Called(ctx, name, nocase) - - var r0 *models.Movie - if rf, ok := ret.Get(0).(func(context.Context, string, bool) *models.Movie); ok { - r0 = rf(ctx, name, nocase) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*models.Movie) - } - } - - var r1 error - if rf, ok := 
ret.Get(1).(func(context.Context, string, bool) error); ok { - r1 = rf(ctx, name, nocase) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// FindByNames provides a mock function with given fields: ctx, names, nocase -func (_m *MovieReaderWriter) FindByNames(ctx context.Context, names []string, nocase bool) ([]*models.Movie, error) { - ret := _m.Called(ctx, names, nocase) - - var r0 []*models.Movie - if rf, ok := ret.Get(0).(func(context.Context, []string, bool) []*models.Movie); ok { - r0 = rf(ctx, names, nocase) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]*models.Movie) - } - } - - var r1 error - if rf, ok := ret.Get(1).(func(context.Context, []string, bool) error); ok { - r1 = rf(ctx, names, nocase) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// FindByPerformerID provides a mock function with given fields: ctx, performerID -func (_m *MovieReaderWriter) FindByPerformerID(ctx context.Context, performerID int) ([]*models.Movie, error) { - ret := _m.Called(ctx, performerID) - - var r0 []*models.Movie - if rf, ok := ret.Get(0).(func(context.Context, int) []*models.Movie); ok { - r0 = rf(ctx, performerID) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]*models.Movie) - } - } - - var r1 error - if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { - r1 = rf(ctx, performerID) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// FindByStudioID provides a mock function with given fields: ctx, studioID -func (_m *MovieReaderWriter) FindByStudioID(ctx context.Context, studioID int) ([]*models.Movie, error) { - ret := _m.Called(ctx, studioID) - - var r0 []*models.Movie - if rf, ok := ret.Get(0).(func(context.Context, int) []*models.Movie); ok { - r0 = rf(ctx, studioID) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]*models.Movie) - } - } - - var r1 error - if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { - r1 = rf(ctx, studioID) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} 
- -// FindMany provides a mock function with given fields: ctx, ids -func (_m *MovieReaderWriter) FindMany(ctx context.Context, ids []int) ([]*models.Movie, error) { - ret := _m.Called(ctx, ids) - - var r0 []*models.Movie - if rf, ok := ret.Get(0).(func(context.Context, []int) []*models.Movie); ok { - r0 = rf(ctx, ids) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]*models.Movie) - } - } - - var r1 error - if rf, ok := ret.Get(1).(func(context.Context, []int) error); ok { - r1 = rf(ctx, ids) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// GetBackImage provides a mock function with given fields: ctx, movieID -func (_m *MovieReaderWriter) GetBackImage(ctx context.Context, movieID int) ([]byte, error) { - ret := _m.Called(ctx, movieID) - - var r0 []byte - if rf, ok := ret.Get(0).(func(context.Context, int) []byte); ok { - r0 = rf(ctx, movieID) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]byte) - } - } - - var r1 error - if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { - r1 = rf(ctx, movieID) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// GetFrontImage provides a mock function with given fields: ctx, movieID -func (_m *MovieReaderWriter) GetFrontImage(ctx context.Context, movieID int) ([]byte, error) { - ret := _m.Called(ctx, movieID) - - var r0 []byte - if rf, ok := ret.Get(0).(func(context.Context, int) []byte); ok { - r0 = rf(ctx, movieID) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]byte) - } - } - - var r1 error - if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { - r1 = rf(ctx, movieID) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// HasBackImage provides a mock function with given fields: ctx, movieID -func (_m *MovieReaderWriter) HasBackImage(ctx context.Context, movieID int) (bool, error) { - ret := _m.Called(ctx, movieID) - - var r0 bool - if rf, ok := ret.Get(0).(func(context.Context, int) bool); ok { - r0 = rf(ctx, movieID) - } else { - r0 = 
ret.Get(0).(bool) - } - - var r1 error - if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { - r1 = rf(ctx, movieID) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// HasFrontImage provides a mock function with given fields: ctx, movieID -func (_m *MovieReaderWriter) HasFrontImage(ctx context.Context, movieID int) (bool, error) { - ret := _m.Called(ctx, movieID) - - var r0 bool - if rf, ok := ret.Get(0).(func(context.Context, int) bool); ok { - r0 = rf(ctx, movieID) - } else { - r0 = ret.Get(0).(bool) - } - - var r1 error - if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { - r1 = rf(ctx, movieID) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// Query provides a mock function with given fields: ctx, movieFilter, findFilter -func (_m *MovieReaderWriter) Query(ctx context.Context, movieFilter *models.MovieFilterType, findFilter *models.FindFilterType) ([]*models.Movie, int, error) { - ret := _m.Called(ctx, movieFilter, findFilter) - - var r0 []*models.Movie - if rf, ok := ret.Get(0).(func(context.Context, *models.MovieFilterType, *models.FindFilterType) []*models.Movie); ok { - r0 = rf(ctx, movieFilter, findFilter) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]*models.Movie) - } - } - - var r1 int - if rf, ok := ret.Get(1).(func(context.Context, *models.MovieFilterType, *models.FindFilterType) int); ok { - r1 = rf(ctx, movieFilter, findFilter) - } else { - r1 = ret.Get(1).(int) - } - - var r2 error - if rf, ok := ret.Get(2).(func(context.Context, *models.MovieFilterType, *models.FindFilterType) error); ok { - r2 = rf(ctx, movieFilter, findFilter) - } else { - r2 = ret.Error(2) - } - - return r0, r1, r2 -} - -// QueryCount provides a mock function with given fields: ctx, movieFilter, findFilter -func (_m *MovieReaderWriter) QueryCount(ctx context.Context, movieFilter *models.MovieFilterType, findFilter *models.FindFilterType) (int, error) { - ret := _m.Called(ctx, movieFilter, findFilter) - - var r0 int - 
if rf, ok := ret.Get(0).(func(context.Context, *models.MovieFilterType, *models.FindFilterType) int); ok { - r0 = rf(ctx, movieFilter, findFilter) - } else { - r0 = ret.Get(0).(int) - } - - var r1 error - if rf, ok := ret.Get(1).(func(context.Context, *models.MovieFilterType, *models.FindFilterType) error); ok { - r1 = rf(ctx, movieFilter, findFilter) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// Update provides a mock function with given fields: ctx, updatedMovie -func (_m *MovieReaderWriter) Update(ctx context.Context, updatedMovie *models.Movie) error { - ret := _m.Called(ctx, updatedMovie) - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *models.Movie) error); ok { - r0 = rf(ctx, updatedMovie) - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// UpdateBackImage provides a mock function with given fields: ctx, movieID, backImage -func (_m *MovieReaderWriter) UpdateBackImage(ctx context.Context, movieID int, backImage []byte) error { - ret := _m.Called(ctx, movieID, backImage) - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, int, []byte) error); ok { - r0 = rf(ctx, movieID, backImage) - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// UpdateFrontImage provides a mock function with given fields: ctx, movieID, frontImage -func (_m *MovieReaderWriter) UpdateFrontImage(ctx context.Context, movieID int, frontImage []byte) error { - ret := _m.Called(ctx, movieID, frontImage) - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, int, []byte) error); ok { - r0 = rf(ctx, movieID, frontImage) - } else { - r0 = ret.Error(0) - } - - return r0 -} - -// UpdatePartial provides a mock function with given fields: ctx, id, updatedMovie -func (_m *MovieReaderWriter) UpdatePartial(ctx context.Context, id int, updatedMovie models.MoviePartial) (*models.Movie, error) { - ret := _m.Called(ctx, id, updatedMovie) - - var r0 *models.Movie - if rf, ok := ret.Get(0).(func(context.Context, int, models.MoviePartial) 
*models.Movie); ok { - r0 = rf(ctx, id, updatedMovie) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*models.Movie) - } - } - - var r1 error - if rf, ok := ret.Get(1).(func(context.Context, int, models.MoviePartial) error); ok { - r1 = rf(ctx, id, updatedMovie) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} diff --git a/pkg/models/mocks/PerformerReaderWriter.go b/pkg/models/mocks/PerformerReaderWriter.go index 7bbc6ef794e..0f3e2be02b6 100644 --- a/pkg/models/mocks/PerformerReaderWriter.go +++ b/pkg/models/mocks/PerformerReaderWriter.go @@ -383,6 +383,29 @@ func (_m *PerformerReaderWriter) GetTagIDs(ctx context.Context, relatedID int) ( return r0, r1 } +// GetURLs provides a mock function with given fields: ctx, relatedID +func (_m *PerformerReaderWriter) GetURLs(ctx context.Context, relatedID int) ([]string, error) { + ret := _m.Called(ctx, relatedID) + + var r0 []string + if rf, ok := ret.Get(0).(func(context.Context, int) []string); ok { + r0 = rf(ctx, relatedID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]string) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, relatedID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // HasImage provides a mock function with given fields: ctx, performerID func (_m *PerformerReaderWriter) HasImage(ctx context.Context, performerID int) (bool, error) { ret := _m.Called(ctx, performerID) diff --git a/pkg/models/mocks/SavedFilterReaderWriter.go b/pkg/models/mocks/SavedFilterReaderWriter.go index 65573854682..53f5cb0e50a 100644 --- a/pkg/models/mocks/SavedFilterReaderWriter.go +++ b/pkg/models/mocks/SavedFilterReaderWriter.go @@ -111,29 +111,6 @@ func (_m *SavedFilterReaderWriter) FindByMode(ctx context.Context, mode models.F return r0, r1 } -// FindDefault provides a mock function with given fields: ctx, mode -func (_m *SavedFilterReaderWriter) FindDefault(ctx context.Context, mode models.FilterMode) (*models.SavedFilter, error) { - 
ret := _m.Called(ctx, mode) - - var r0 *models.SavedFilter - if rf, ok := ret.Get(0).(func(context.Context, models.FilterMode) *models.SavedFilter); ok { - r0 = rf(ctx, mode) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).(*models.SavedFilter) - } - } - - var r1 error - if rf, ok := ret.Get(1).(func(context.Context, models.FilterMode) error); ok { - r1 = rf(ctx, mode) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - // FindMany provides a mock function with given fields: ctx, ids, ignoreNotFound func (_m *SavedFilterReaderWriter) FindMany(ctx context.Context, ids []int, ignoreNotFound bool) ([]*models.SavedFilter, error) { ret := _m.Called(ctx, ids, ignoreNotFound) @@ -157,20 +134,6 @@ func (_m *SavedFilterReaderWriter) FindMany(ctx context.Context, ids []int, igno return r0, r1 } -// SetDefault provides a mock function with given fields: ctx, obj -func (_m *SavedFilterReaderWriter) SetDefault(ctx context.Context, obj *models.SavedFilter) error { - ret := _m.Called(ctx, obj) - - var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *models.SavedFilter) error); ok { - r0 = rf(ctx, obj) - } else { - r0 = ret.Error(0) - } - - return r0 -} - // Update provides a mock function with given fields: ctx, obj func (_m *SavedFilterReaderWriter) Update(ctx context.Context, obj *models.SavedFilter) error { ret := _m.Called(ctx, obj) diff --git a/pkg/models/mocks/SceneReaderWriter.go b/pkg/models/mocks/SceneReaderWriter.go index 080e40b0d29..e12ae999c6f 100644 --- a/pkg/models/mocks/SceneReaderWriter.go +++ b/pkg/models/mocks/SceneReaderWriter.go @@ -190,20 +190,20 @@ func (_m *SceneReaderWriter) CountByFileID(ctx context.Context, fileID models.Fi return r0, r1 } -// CountByMovieID provides a mock function with given fields: ctx, movieID -func (_m *SceneReaderWriter) CountByMovieID(ctx context.Context, movieID int) (int, error) { - ret := _m.Called(ctx, movieID) +// CountByGroupID provides a mock function with given fields: ctx, groupID +func (_m 
*SceneReaderWriter) CountByGroupID(ctx context.Context, groupID int) (int, error) { + ret := _m.Called(ctx, groupID) var r0 int if rf, ok := ret.Get(0).(func(context.Context, int) int); ok { - r0 = rf(ctx, movieID) + r0 = rf(ctx, groupID) } else { r0 = ret.Get(0).(int) } var r1 error if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { - r1 = rf(ctx, movieID) + r1 = rf(ctx, groupID) } else { r1 = ret.Error(1) } @@ -589,13 +589,13 @@ func (_m *SceneReaderWriter) FindByGalleryID(ctx context.Context, performerID in return r0, r1 } -// FindByMovieID provides a mock function with given fields: ctx, movieID -func (_m *SceneReaderWriter) FindByMovieID(ctx context.Context, movieID int) ([]*models.Scene, error) { - ret := _m.Called(ctx, movieID) +// FindByGroupID provides a mock function with given fields: ctx, groupID +func (_m *SceneReaderWriter) FindByGroupID(ctx context.Context, groupID int) ([]*models.Scene, error) { + ret := _m.Called(ctx, groupID) var r0 []*models.Scene if rf, ok := ret.Get(0).(func(context.Context, int) []*models.Scene); ok { - r0 = rf(ctx, movieID) + r0 = rf(ctx, groupID) } else { if ret.Get(0) != nil { r0 = ret.Get(0).([]*models.Scene) @@ -604,7 +604,7 @@ func (_m *SceneReaderWriter) FindByMovieID(ctx context.Context, movieID int) ([] var r1 error if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { - r1 = rf(ctx, movieID) + r1 = rf(ctx, groupID) } else { r1 = ret.Error(1) } @@ -840,6 +840,29 @@ func (_m *SceneReaderWriter) GetGalleryIDs(ctx context.Context, relatedID int) ( return r0, r1 } +// GetGroups provides a mock function with given fields: ctx, id +func (_m *SceneReaderWriter) GetGroups(ctx context.Context, id int) ([]models.GroupsScenes, error) { + ret := _m.Called(ctx, id) + + var r0 []models.GroupsScenes + if rf, ok := ret.Get(0).(func(context.Context, int) []models.GroupsScenes); ok { + r0 = rf(ctx, id) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.GroupsScenes) + } + } + + var r1 error + if 
rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, id) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // GetManyFileIDs provides a mock function with given fields: ctx, ids func (_m *SceneReaderWriter) GetManyFileIDs(ctx context.Context, ids []int) ([][]models.FileID, error) { ret := _m.Called(ctx, ids) @@ -978,29 +1001,6 @@ func (_m *SceneReaderWriter) GetManyViewDates(ctx context.Context, ids []int) ([ return r0, r1 } -// GetMovies provides a mock function with given fields: ctx, id -func (_m *SceneReaderWriter) GetMovies(ctx context.Context, id int) ([]models.MoviesScenes, error) { - ret := _m.Called(ctx, id) - - var r0 []models.MoviesScenes - if rf, ok := ret.Get(0).(func(context.Context, int) []models.MoviesScenes); ok { - r0 = rf(ctx, id) - } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]models.MoviesScenes) - } - } - - var r1 error - if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { - r1 = rf(ctx, id) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - // GetOCount provides a mock function with given fields: ctx, id func (_m *SceneReaderWriter) GetOCount(ctx context.Context, id int) (int, error) { ret := _m.Called(ctx, id) @@ -1267,6 +1267,27 @@ func (_m *SceneReaderWriter) QueryCount(ctx context.Context, sceneFilter *models return r0, r1 } +// ResetActivity provides a mock function with given fields: ctx, sceneID, resetResume, resetDuration +func (_m *SceneReaderWriter) ResetActivity(ctx context.Context, sceneID int, resetResume bool, resetDuration bool) (bool, error) { + ret := _m.Called(ctx, sceneID, resetResume, resetDuration) + + var r0 bool + if rf, ok := ret.Get(0).(func(context.Context, int, bool, bool) bool); ok { + r0 = rf(ctx, sceneID, resetResume, resetDuration) + } else { + r0 = ret.Get(0).(bool) + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int, bool, bool) error); ok { + r1 = rf(ctx, sceneID, resetResume, resetDuration) + } else { + r1 = ret.Error(1) + } + 
+ return r0, r1 +} + // ResetO provides a mock function with given fields: ctx, id func (_m *SceneReaderWriter) ResetO(ctx context.Context, id int) (int, error) { ret := _m.Called(ctx, id) diff --git a/pkg/models/mocks/StudioReaderWriter.go b/pkg/models/mocks/StudioReaderWriter.go index c46e45d4c24..d4932ca71da 100644 --- a/pkg/models/mocks/StudioReaderWriter.go +++ b/pkg/models/mocks/StudioReaderWriter.go @@ -58,6 +58,27 @@ func (_m *StudioReaderWriter) Count(ctx context.Context) (int, error) { return r0, r1 } +// CountByTagID provides a mock function with given fields: ctx, tagID +func (_m *StudioReaderWriter) CountByTagID(ctx context.Context, tagID int) (int, error) { + ret := _m.Called(ctx, tagID) + + var r0 int + if rf, ok := ret.Get(0).(func(context.Context, int) int); ok { + r0 = rf(ctx, tagID) + } else { + r0 = ret.Get(0).(int) + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, tagID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // Create provides a mock function with given fields: ctx, newStudio func (_m *StudioReaderWriter) Create(ctx context.Context, newStudio *models.Studio) error { ret := _m.Called(ctx, newStudio) @@ -316,6 +337,29 @@ func (_m *StudioReaderWriter) GetStashIDs(ctx context.Context, relatedID int) ([ return r0, r1 } +// GetTagIDs provides a mock function with given fields: ctx, relatedID +func (_m *StudioReaderWriter) GetTagIDs(ctx context.Context, relatedID int) ([]int, error) { + ret := _m.Called(ctx, relatedID) + + var r0 []int + if rf, ok := ret.Get(0).(func(context.Context, int) []int); ok { + r0 = rf(ctx, relatedID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]int) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, relatedID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // HasImage provides a mock function with given fields: ctx, studioID func (_m *StudioReaderWriter) HasImage(ctx 
context.Context, studioID int) (bool, error) { ret := _m.Called(ctx, studioID) @@ -367,6 +411,27 @@ func (_m *StudioReaderWriter) Query(ctx context.Context, studioFilter *models.St return r0, r1, r2 } +// QueryCount provides a mock function with given fields: ctx, studioFilter, findFilter +func (_m *StudioReaderWriter) QueryCount(ctx context.Context, studioFilter *models.StudioFilterType, findFilter *models.FindFilterType) (int, error) { + ret := _m.Called(ctx, studioFilter, findFilter) + + var r0 int + if rf, ok := ret.Get(0).(func(context.Context, *models.StudioFilterType, *models.FindFilterType) int); ok { + r0 = rf(ctx, studioFilter, findFilter) + } else { + r0 = ret.Get(0).(int) + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, *models.StudioFilterType, *models.FindFilterType) error); ok { + r1 = rf(ctx, studioFilter, findFilter) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // QueryForAutoTag provides a mock function with given fields: ctx, words func (_m *StudioReaderWriter) QueryForAutoTag(ctx context.Context, words []string) ([]*models.Studio, error) { ret := _m.Called(ctx, words) diff --git a/pkg/models/mocks/TagReaderWriter.go b/pkg/models/mocks/TagReaderWriter.go index 9b610e49b6e..a285b97bf00 100644 --- a/pkg/models/mocks/TagReaderWriter.go +++ b/pkg/models/mocks/TagReaderWriter.go @@ -243,6 +243,29 @@ func (_m *TagReaderWriter) FindByGalleryID(ctx context.Context, galleryID int) ( return r0, r1 } +// FindByGroupID provides a mock function with given fields: ctx, groupID +func (_m *TagReaderWriter) FindByGroupID(ctx context.Context, groupID int) ([]*models.Tag, error) { + ret := _m.Called(ctx, groupID) + + var r0 []*models.Tag + if rf, ok := ret.Get(0).(func(context.Context, int) []*models.Tag); ok { + r0 = rf(ctx, groupID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Tag) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, groupID) + } else { + 
r1 = ret.Error(1) + } + + return r0, r1 +} + // FindByImageID provides a mock function with given fields: ctx, imageID func (_m *TagReaderWriter) FindByImageID(ctx context.Context, imageID int) ([]*models.Tag, error) { ret := _m.Called(ctx, imageID) @@ -404,6 +427,29 @@ func (_m *TagReaderWriter) FindBySceneMarkerID(ctx context.Context, sceneMarkerI return r0, r1 } +// FindByStudioID provides a mock function with given fields: ctx, studioID +func (_m *TagReaderWriter) FindByStudioID(ctx context.Context, studioID int) ([]*models.Tag, error) { + ret := _m.Called(ctx, studioID) + + var r0 []*models.Tag + if rf, ok := ret.Get(0).(func(context.Context, int) []*models.Tag); ok { + r0 = rf(ctx, studioID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Tag) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, studioID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // FindMany provides a mock function with given fields: ctx, ids func (_m *TagReaderWriter) FindMany(ctx context.Context, ids []int) ([]*models.Tag, error) { ret := _m.Called(ctx, ids) @@ -450,6 +496,29 @@ func (_m *TagReaderWriter) GetAliases(ctx context.Context, relatedID int) ([]str return r0, r1 } +// GetChildIDs provides a mock function with given fields: ctx, relatedID +func (_m *TagReaderWriter) GetChildIDs(ctx context.Context, relatedID int) ([]int, error) { + ret := _m.Called(ctx, relatedID) + + var r0 []int + if rf, ok := ret.Get(0).(func(context.Context, int) []int); ok { + r0 = rf(ctx, relatedID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]int) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, relatedID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // GetImage provides a mock function with given fields: ctx, tagID func (_m *TagReaderWriter) GetImage(ctx context.Context, tagID int) ([]byte, error) { ret := _m.Called(ctx, tagID) @@ -473,6 
+542,29 @@ func (_m *TagReaderWriter) GetImage(ctx context.Context, tagID int) ([]byte, err return r0, r1 } +// GetParentIDs provides a mock function with given fields: ctx, relatedID +func (_m *TagReaderWriter) GetParentIDs(ctx context.Context, relatedID int) ([]int, error) { + ret := _m.Called(ctx, relatedID) + + var r0 []int + if rf, ok := ret.Get(0).(func(context.Context, int) []int); ok { + r0 = rf(ctx, relatedID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]int) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, relatedID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // HasImage provides a mock function with given fields: ctx, tagID func (_m *TagReaderWriter) HasImage(ctx context.Context, tagID int) (bool, error) { ret := _m.Called(ctx, tagID) diff --git a/pkg/models/mocks/database.go b/pkg/models/mocks/database.go index 83d2cbfabad..ec4177b305b 100644 --- a/pkg/models/mocks/database.go +++ b/pkg/models/mocks/database.go @@ -1,3 +1,4 @@ +// Package mocks provides mocks for various interfaces in [models]. 
package mocks import ( @@ -14,7 +15,7 @@ type Database struct { Gallery *GalleryReaderWriter GalleryChapter *GalleryChapterReaderWriter Image *ImageReaderWriter - Movie *MovieReaderWriter + Group *GroupReaderWriter Performer *PerformerReaderWriter Scene *SceneReaderWriter SceneMarker *SceneMarkerReaderWriter @@ -63,7 +64,7 @@ func NewDatabase() *Database { Gallery: &GalleryReaderWriter{}, GalleryChapter: &GalleryChapterReaderWriter{}, Image: &ImageReaderWriter{}, - Movie: &MovieReaderWriter{}, + Group: &GroupReaderWriter{}, Performer: &PerformerReaderWriter{}, Scene: &SceneReaderWriter{}, SceneMarker: &SceneMarkerReaderWriter{}, @@ -79,7 +80,7 @@ func (db *Database) AssertExpectations(t mock.TestingT) { db.Gallery.AssertExpectations(t) db.GalleryChapter.AssertExpectations(t) db.Image.AssertExpectations(t) - db.Movie.AssertExpectations(t) + db.Group.AssertExpectations(t) db.Performer.AssertExpectations(t) db.Scene.AssertExpectations(t) db.SceneMarker.AssertExpectations(t) @@ -96,7 +97,7 @@ func (db *Database) Repository() models.Repository { Gallery: db.Gallery, GalleryChapter: db.GalleryChapter, Image: db.Image, - Movie: db.Movie, + Group: db.Group, Performer: db.Performer, Scene: db.Scene, SceneMarker: db.SceneMarker, diff --git a/pkg/models/model_group.go b/pkg/models/model_group.go new file mode 100644 index 00000000000..82c71996ae8 --- /dev/null +++ b/pkg/models/model_group.go @@ -0,0 +1,84 @@ +package models + +import ( + "context" + "time" +) + +type Group struct { + ID int `json:"id"` + Name string `json:"name"` + Aliases string `json:"aliases"` + Duration *int `json:"duration"` + Date *Date `json:"date"` + // Rating expressed in 1-100 scale + Rating *int `json:"rating"` + StudioID *int `json:"studio_id"` + Director string `json:"director"` + Synopsis string `json:"synopsis"` + CreatedAt time.Time `json:"created_at"` + UpdatedAt time.Time `json:"updated_at"` + + URLs RelatedStrings `json:"urls"` + TagIDs RelatedIDs `json:"tag_ids"` + + ContainingGroups 
RelatedGroupDescriptions `json:"containing_groups"` + SubGroups RelatedGroupDescriptions `json:"sub_groups"` +} + +func NewGroup() Group { + currentTime := time.Now() + return Group{ + CreatedAt: currentTime, + UpdatedAt: currentTime, + } +} + +func (m *Group) LoadURLs(ctx context.Context, l URLLoader) error { + return m.URLs.load(func() ([]string, error) { + return l.GetURLs(ctx, m.ID) + }) +} + +func (m *Group) LoadTagIDs(ctx context.Context, l TagIDLoader) error { + return m.TagIDs.load(func() ([]int, error) { + return l.GetTagIDs(ctx, m.ID) + }) +} + +func (m *Group) LoadContainingGroupIDs(ctx context.Context, l ContainingGroupLoader) error { + return m.ContainingGroups.load(func() ([]GroupIDDescription, error) { + return l.GetContainingGroupDescriptions(ctx, m.ID) + }) +} + +func (m *Group) LoadSubGroupIDs(ctx context.Context, l SubGroupLoader) error { + return m.SubGroups.load(func() ([]GroupIDDescription, error) { + return l.GetSubGroupDescriptions(ctx, m.ID) + }) +} + +type GroupPartial struct { + Name OptionalString + Aliases OptionalString + Duration OptionalInt + Date OptionalDate + // Rating expressed in 1-100 scale + Rating OptionalInt + StudioID OptionalInt + Director OptionalString + Synopsis OptionalString + URLs *UpdateStrings + TagIDs *UpdateIDs + ContainingGroups *UpdateGroupDescriptions + SubGroups *UpdateGroupDescriptions + CreatedAt OptionalTime + UpdatedAt OptionalTime +} + +func NewGroupPartial() GroupPartial { + currentTime := time.Now() + return GroupPartial{ + UpdatedAt: NewOptionalTime(currentTime), + } +} diff --git a/pkg/models/model_joins.go b/pkg/models/model_joins.go index da70293c3d3..7b7cae3e46a 100644 --- a/pkg/models/model_joins.go +++ b/pkg/models/model_joins.go @@ -5,54 +5,54 @@ import ( "strconv" ) -type MoviesScenes struct { - MovieID int `json:"movie_id"` +type GroupsScenes struct { + GroupID int `json:"movie_id"` // SceneID int `json:"scene_id"` SceneIndex *int `json:"scene_index"` } -func (s MoviesScenes) 
SceneMovieInput() SceneMovieInput { +func (s GroupsScenes) SceneMovieInput() SceneMovieInput { return SceneMovieInput{ - MovieID: strconv.Itoa(s.MovieID), + MovieID: strconv.Itoa(s.GroupID), SceneIndex: s.SceneIndex, } } -func (s MoviesScenes) Equal(o MoviesScenes) bool { - return o.MovieID == s.MovieID && ((o.SceneIndex == nil && s.SceneIndex == nil) || +func (s GroupsScenes) Equal(o GroupsScenes) bool { + return o.GroupID == s.GroupID && ((o.SceneIndex == nil && s.SceneIndex == nil) || (o.SceneIndex != nil && s.SceneIndex != nil && *o.SceneIndex == *s.SceneIndex)) } -type UpdateMovieIDs struct { - Movies []MoviesScenes `json:"movies"` +type UpdateGroupIDs struct { + Groups []GroupsScenes `json:"movies"` Mode RelationshipUpdateMode `json:"mode"` } -func (u *UpdateMovieIDs) SceneMovieInputs() []SceneMovieInput { +func (u *UpdateGroupIDs) SceneMovieInputs() []SceneMovieInput { if u == nil { return nil } - ret := make([]SceneMovieInput, len(u.Movies)) - for _, id := range u.Movies { + ret := make([]SceneMovieInput, len(u.Groups)) + for _, id := range u.Groups { ret = append(ret, id.SceneMovieInput()) } return ret } -func (u *UpdateMovieIDs) AddUnique(v MoviesScenes) { - for _, vv := range u.Movies { - if vv.MovieID == v.MovieID { +func (u *UpdateGroupIDs) AddUnique(v GroupsScenes) { + for _, vv := range u.Groups { + if vv.GroupID == v.GroupID { return } } - u.Movies = append(u.Movies, v) + u.Groups = append(u.Groups, v) } -func MoviesScenesFromInput(input []SceneMovieInput) ([]MoviesScenes, error) { - ret := make([]MoviesScenes, len(input)) +func GroupsScenesFromInput(input []SceneMovieInput) ([]GroupsScenes, error) { + ret := make([]GroupsScenes, len(input)) for i, v := range input { mID, err := strconv.Atoi(v.MovieID) @@ -60,11 +60,16 @@ func MoviesScenesFromInput(input []SceneMovieInput) ([]MoviesScenes, error) { return nil, fmt.Errorf("invalid movie ID: %s", v.MovieID) } - ret[i] = MoviesScenes{ - MovieID: mID, + ret[i] = GroupsScenes{ + GroupID: mID, SceneIndex: 
v.SceneIndex, } } return ret, nil } + +type GroupIDDescription struct { + GroupID int `json:"group_id"` + Description string `json:"description"` +} diff --git a/pkg/models/model_movie.go b/pkg/models/model_movie.go deleted file mode 100644 index 5880ff2d137..00000000000 --- a/pkg/models/model_movie.go +++ /dev/null @@ -1,51 +0,0 @@ -package models - -import ( - "time" -) - -type Movie struct { - ID int `json:"id"` - Name string `json:"name"` - Aliases string `json:"aliases"` - Duration *int `json:"duration"` - Date *Date `json:"date"` - // Rating expressed in 1-100 scale - Rating *int `json:"rating"` - StudioID *int `json:"studio_id"` - Director string `json:"director"` - Synopsis string `json:"synopsis"` - URL string `json:"url"` - CreatedAt time.Time `json:"created_at"` - UpdatedAt time.Time `json:"updated_at"` -} - -func NewMovie() Movie { - currentTime := time.Now() - return Movie{ - CreatedAt: currentTime, - UpdatedAt: currentTime, - } -} - -type MoviePartial struct { - Name OptionalString - Aliases OptionalString - Duration OptionalInt - Date OptionalDate - // Rating expressed in 1-100 scale - Rating OptionalInt - StudioID OptionalInt - Director OptionalString - Synopsis OptionalString - URL OptionalString - CreatedAt OptionalTime - UpdatedAt OptionalTime -} - -func NewMoviePartial() MoviePartial { - currentTime := time.Now() - return MoviePartial{ - UpdatedAt: NewOptionalTime(currentTime), - } -} diff --git a/pkg/models/model_performer.go b/pkg/models/model_performer.go index 09f92e13c6d..85257ba38a4 100644 --- a/pkg/models/model_performer.go +++ b/pkg/models/model_performer.go @@ -10,9 +10,6 @@ type Performer struct { Name string `json:"name"` Disambiguation string `json:"disambiguation"` Gender *GenderEnum `json:"gender"` - URL string `json:"url"` - Twitter string `json:"twitter"` - Instagram string `json:"instagram"` Birthdate *Date `json:"birthdate"` Ethnicity string `json:"ethnicity"` Country string `json:"country"` @@ -37,6 +34,7 @@ type Performer 
struct { IgnoreAutoTag bool `json:"ignore_auto_tag"` Aliases RelatedStrings `json:"aliases"` + URLs RelatedStrings `json:"urls"` TagIDs RelatedIDs `json:"tag_ids"` StashIDs RelatedStashIDs `json:"stash_ids"` } @@ -55,9 +53,7 @@ type PerformerPartial struct { Name OptionalString Disambiguation OptionalString Gender OptionalString - URL OptionalString - Twitter OptionalString - Instagram OptionalString + URLs *UpdateStrings Birthdate OptionalDate Ethnicity OptionalString Country OptionalString @@ -99,6 +95,12 @@ func (s *Performer) LoadAliases(ctx context.Context, l AliasLoader) error { }) } +func (s *Performer) LoadURLs(ctx context.Context, l URLLoader) error { + return s.URLs.load(func() ([]string, error) { + return l.GetURLs(ctx, s.ID) + }) +} + func (s *Performer) LoadTagIDs(ctx context.Context, l TagIDLoader) error { return s.TagIDs.load(func() ([]int, error) { return l.GetTagIDs(ctx, s.ID) diff --git a/pkg/models/model_saved_filter.go b/pkg/models/model_saved_filter.go index d680e7c95ef..8c9e7b18d8e 100644 --- a/pkg/models/model_saved_filter.go +++ b/pkg/models/model_saved_filter.go @@ -15,6 +15,7 @@ const ( FilterModeGalleries FilterMode = "GALLERIES" FilterModeSceneMarkers FilterMode = "SCENE_MARKERS" FilterModeMovies FilterMode = "MOVIES" + FilterModeGroups FilterMode = "GROUPS" FilterModeTags FilterMode = "TAGS" FilterModeImages FilterMode = "IMAGES" ) @@ -25,6 +26,7 @@ var AllFilterMode = []FilterMode{ FilterModeStudios, FilterModeGalleries, FilterModeSceneMarkers, + FilterModeGroups, FilterModeMovies, FilterModeTags, FilterModeImages, @@ -32,7 +34,7 @@ var AllFilterMode = []FilterMode{ func (e FilterMode) IsValid() bool { switch e { - case FilterModeScenes, FilterModePerformers, FilterModeStudios, FilterModeGalleries, FilterModeSceneMarkers, FilterModeMovies, FilterModeTags, FilterModeImages: + case FilterModeScenes, FilterModePerformers, FilterModeStudios, FilterModeGalleries, FilterModeSceneMarkers, FilterModeMovies, FilterModeGroups, FilterModeTags, 
FilterModeImages: return true } return false diff --git a/pkg/models/model_scene.go b/pkg/models/model_scene.go index 7b7770471a4..3f26a8cb6d8 100644 --- a/pkg/models/model_scene.go +++ b/pkg/models/model_scene.go @@ -41,7 +41,7 @@ type Scene struct { GalleryIDs RelatedIDs `json:"gallery_ids"` TagIDs RelatedIDs `json:"tag_ids"` PerformerIDs RelatedIDs `json:"performer_ids"` - Movies RelatedMovies `json:"movies"` + Groups RelatedGroups `json:"groups"` StashIDs RelatedStashIDs `json:"stash_ids"` } @@ -74,7 +74,7 @@ type ScenePartial struct { GalleryIDs *UpdateIDs TagIDs *UpdateIDs PerformerIDs *UpdateIDs - MovieIDs *UpdateMovieIDs + GroupIDs *UpdateGroupIDs StashIDs *UpdateStashIDs PrimaryFileID *FileID } @@ -139,9 +139,9 @@ func (s *Scene) LoadTagIDs(ctx context.Context, l TagIDLoader) error { }) } -func (s *Scene) LoadMovies(ctx context.Context, l SceneMovieLoader) error { - return s.Movies.load(func() ([]MoviesScenes, error) { - return l.GetMovies(ctx, s.ID) +func (s *Scene) LoadGroups(ctx context.Context, l SceneGroupLoader) error { + return s.Groups.load(func() ([]GroupsScenes, error) { + return l.GetGroups(ctx, s.ID) }) } @@ -168,7 +168,7 @@ func (s *Scene) LoadRelationships(ctx context.Context, l SceneReader) error { return err } - if err := s.LoadMovies(ctx, l); err != nil { + if err := s.LoadGroups(ctx, l); err != nil { return err } @@ -210,7 +210,7 @@ func (s ScenePartial) UpdateInput(id int) SceneUpdateInput { StudioID: s.StudioID.StringPtr(), GalleryIds: s.GalleryIDs.IDStrings(), PerformerIds: s.PerformerIDs.IDStrings(), - Movies: s.MovieIDs.SceneMovieInputs(), + Movies: s.GroupIDs.SceneMovieInputs(), TagIds: s.TagIDs.IDStrings(), StashIds: stashIDs, } diff --git a/pkg/models/model_scraped_item.go b/pkg/models/model_scraped_item.go index cb383c082e7..35f781109cb 100644 --- a/pkg/models/model_scraped_item.go +++ b/pkg/models/model_scraped_item.go @@ -62,9 +62,9 @@ func (s *ScrapedStudio) GetImage(ctx context.Context, excluded map[string]bool) return nil, 
nil } -func (s *ScrapedStudio) ToPartial(id *string, endpoint string, excluded map[string]bool, existingStashIDs []StashID) *StudioPartial { +func (s *ScrapedStudio) ToPartial(id string, endpoint string, excluded map[string]bool, existingStashIDs []StashID) StudioPartial { ret := NewStudioPartial() - ret.ID, _ = strconv.Atoi(*id) + ret.ID, _ = strconv.Atoi(id) if s.Name != "" && !excluded["name"] { ret.Name = NewOptionalString(s.Name) @@ -82,8 +82,6 @@ func (s *ScrapedStudio) ToPartial(id *string, endpoint string, excluded map[stri ret.ParentID = NewOptionalInt(parentID) } } - } else { - ret.ParentID = NewOptionalIntPtr(nil) } if s.RemoteSiteID != nil && endpoint != "" { @@ -97,7 +95,7 @@ func (s *ScrapedStudio) ToPartial(id *string, endpoint string, excluded map[stri }) } - return &ret + return ret } // A performer from a scraping operation... @@ -107,9 +105,10 @@ type ScrapedPerformer struct { Name *string `json:"name"` Disambiguation *string `json:"disambiguation"` Gender *string `json:"gender"` - URL *string `json:"url"` - Twitter *string `json:"twitter"` - Instagram *string `json:"instagram"` + URLs []string `json:"urls"` + URL *string `json:"url"` // deprecated + Twitter *string `json:"twitter"` // deprecated + Instagram *string `json:"instagram"` // deprecated Birthdate *string `json:"birthdate"` Ethnicity *string `json:"ethnicity"` Country *string `json:"country"` @@ -125,7 +124,7 @@ type ScrapedPerformer struct { Aliases *string `json:"aliases"` Tags []*ScrapedTag `json:"tags"` // This should be a base64 encoded data URL - Image *string `json:"image"` + Image *string `json:"image"` // deprecated: use Images Images []string `json:"images"` Details *string `json:"details"` DeathDate *string `json:"death_date"` @@ -191,9 +190,7 @@ func (p *ScrapedPerformer) ToPerformer(endpoint string, excluded map[string]bool ret.Weight = &w } } - if p.Instagram != nil && !excluded["instagram"] { - ret.Instagram = *p.Instagram - } + if p.Measurements != nil && 
!excluded["measurements"] { ret.Measurements = *p.Measurements } @@ -221,11 +218,27 @@ func (p *ScrapedPerformer) ToPerformer(endpoint string, excluded map[string]bool ret.Circumcised = &v } } - if p.Twitter != nil && !excluded["twitter"] { - ret.Twitter = *p.Twitter - } - if p.URL != nil && !excluded["url"] { - ret.URL = *p.URL + + // if URLs are provided, only use those + if len(p.URLs) > 0 { + if !excluded["urls"] { + ret.URLs = NewRelatedStrings(p.URLs) + } + } else { + urls := []string{} + if p.URL != nil && !excluded["url"] { + urls = append(urls, *p.URL) + } + if p.Twitter != nil && !excluded["twitter"] { + urls = append(urls, *p.Twitter) + } + if p.Instagram != nil && !excluded["instagram"] { + urls = append(urls, *p.Instagram) + } + + if len(urls) > 0 { + ret.URLs = NewRelatedStrings(urls) + } } if p.RemoteSiteID != nil && endpoint != "" { @@ -309,9 +322,6 @@ func (p *ScrapedPerformer) ToPartial(endpoint string, excluded map[string]bool, ret.Weight = NewOptionalInt(w) } } - if p.Instagram != nil && !excluded["instagram"] { - ret.Instagram = NewOptionalString(*p.Instagram) - } if p.Measurements != nil && !excluded["measurements"] { ret.Measurements = NewOptionalString(*p.Measurements) } @@ -330,11 +340,33 @@ func (p *ScrapedPerformer) ToPartial(endpoint string, excluded map[string]bool, if p.Tattoos != nil && !excluded["tattoos"] { ret.Tattoos = NewOptionalString(*p.Tattoos) } - if p.Twitter != nil && !excluded["twitter"] { - ret.Twitter = NewOptionalString(*p.Twitter) - } - if p.URL != nil && !excluded["url"] { - ret.URL = NewOptionalString(*p.URL) + + // if URLs are provided, only use those + if len(p.URLs) > 0 { + if !excluded["urls"] { + ret.URLs = &UpdateStrings{ + Values: p.URLs, + Mode: RelationshipUpdateModeSet, + } + } + } else { + urls := []string{} + if p.URL != nil && !excluded["url"] { + urls = append(urls, *p.URL) + } + if p.Twitter != nil && !excluded["twitter"] { + urls = append(urls, *p.Twitter) + } + if p.Instagram != nil && 
!excluded["instagram"] { + urls = append(urls, *p.Instagram) + } + + if len(urls) > 0 { + ret.URLs = &UpdateStrings{ + Values: urls, + Mode: RelationshipUpdateModeSet, + } + } } if p.RemoteSiteID != nil && endpoint != "" { @@ -368,13 +400,86 @@ type ScrapedMovie struct { Date *string `json:"date"` Rating *string `json:"rating"` Director *string `json:"director"` - URL *string `json:"url"` + URLs []string `json:"urls"` Synopsis *string `json:"synopsis"` Studio *ScrapedStudio `json:"studio"` + Tags []*ScrapedTag `json:"tags"` // This should be a base64 encoded data URL FrontImage *string `json:"front_image"` // This should be a base64 encoded data URL BackImage *string `json:"back_image"` + + // deprecated + URL *string `json:"url"` } func (ScrapedMovie) IsScrapedContent() {} + +func (m ScrapedMovie) ScrapedGroup() ScrapedGroup { + ret := ScrapedGroup{ + StoredID: m.StoredID, + Name: m.Name, + Aliases: m.Aliases, + Duration: m.Duration, + Date: m.Date, + Rating: m.Rating, + Director: m.Director, + URLs: m.URLs, + Synopsis: m.Synopsis, + Studio: m.Studio, + Tags: m.Tags, + FrontImage: m.FrontImage, + BackImage: m.BackImage, + } + + if len(m.URLs) == 0 && m.URL != nil { + ret.URLs = []string{*m.URL} + } + + return ret +} + +// ScrapedGroup is a group from a scraping operation +type ScrapedGroup struct { + StoredID *string `json:"stored_id"` + Name *string `json:"name"` + Aliases *string `json:"aliases"` + Duration *string `json:"duration"` + Date *string `json:"date"` + Rating *string `json:"rating"` + Director *string `json:"director"` + URLs []string `json:"urls"` + Synopsis *string `json:"synopsis"` + Studio *ScrapedStudio `json:"studio"` + Tags []*ScrapedTag `json:"tags"` + // This should be a base64 encoded data URL + FrontImage *string `json:"front_image"` + // This should be a base64 encoded data URL + BackImage *string `json:"back_image"` +} + +func (ScrapedGroup) IsScrapedContent() {} + +func (g ScrapedGroup) ScrapedMovie() ScrapedMovie { + ret := 
ScrapedMovie{ + StoredID: g.StoredID, + Name: g.Name, + Aliases: g.Aliases, + Duration: g.Duration, + Date: g.Date, + Rating: g.Rating, + Director: g.Director, + URLs: g.URLs, + Synopsis: g.Synopsis, + Studio: g.Studio, + Tags: g.Tags, + FrontImage: g.FrontImage, + BackImage: g.BackImage, + } + + if len(g.URLs) > 0 { + ret.URL = &g.URLs[0] + } + + return ret +} diff --git a/pkg/models/model_scraped_item_test.go b/pkg/models/model_scraped_item_test.go index a6e42f2fd80..87ce2ad57dc 100644 --- a/pkg/models/model_scraped_item_test.go +++ b/pkg/models/model_scraped_item_test.go @@ -161,9 +161,9 @@ func Test_scrapedToPerformerInput(t *testing.T) { Tattoos: nextVal(), Piercings: nextVal(), Aliases: nextVal(), + URL: nextVal(), Twitter: nextVal(), Instagram: nextVal(), - URL: nextVal(), Details: nextVal(), RemoteSiteID: &remoteSiteID, }, @@ -186,9 +186,7 @@ func Test_scrapedToPerformerInput(t *testing.T) { Tattoos: *nextVal(), Piercings: *nextVal(), Aliases: NewRelatedStrings([]string{*nextVal()}), - Twitter: *nextVal(), - Instagram: *nextVal(), - URL: *nextVal(), + URLs: NewRelatedStrings([]string{*nextVal(), *nextVal(), *nextVal()}), Details: *nextVal(), StashIDs: NewRelatedStashIDs([]StashID{ { @@ -249,3 +247,123 @@ func Test_scrapedToPerformerInput(t *testing.T) { }) } } + +func TestScrapedStudio_ToPartial(t *testing.T) { + var ( + id = 1000 + idStr = strconv.Itoa(id) + storedID = "storedID" + parentStoredID = 2000 + parentStoredIDStr = strconv.Itoa(parentStoredID) + name = "name" + url = "url" + remoteSiteID = "remoteSiteID" + endpoint = "endpoint" + image = "image" + images = []string{image} + + existingEndpoint = "existingEndpoint" + existingStashID = StashID{"existingStashID", existingEndpoint} + existingStashIDs = []StashID{existingStashID} + ) + + fullStudio := ScrapedStudio{ + StoredID: &storedID, + Name: name, + URL: &url, + Parent: &ScrapedStudio{ + StoredID: &parentStoredIDStr, + }, + Image: &image, + Images: images, + RemoteSiteID: &remoteSiteID, + } + + 
type args struct { + id string + endpoint string + excluded map[string]bool + existingStashIDs []StashID + } + + stdArgs := args{ + id: idStr, + endpoint: endpoint, + excluded: map[string]bool{}, + existingStashIDs: existingStashIDs, + } + + excludeAll := map[string]bool{ + "name": true, + "url": true, + "parent": true, + } + + tests := []struct { + name string + o ScrapedStudio + args args + want StudioPartial + }{ + { + "full no exclusions", + fullStudio, + stdArgs, + StudioPartial{ + ID: id, + Name: NewOptionalString(name), + URL: NewOptionalString(url), + ParentID: NewOptionalInt(parentStoredID), + StashIDs: &UpdateStashIDs{ + StashIDs: append(existingStashIDs, StashID{ + Endpoint: endpoint, + StashID: remoteSiteID, + }), + Mode: RelationshipUpdateModeSet, + }, + }, + }, + { + "exclude all", + fullStudio, + args{ + id: idStr, + excluded: excludeAll, + }, + StudioPartial{ + ID: id, + }, + }, + { + "overwrite stash id", + fullStudio, + args{ + id: idStr, + excluded: excludeAll, + endpoint: existingEndpoint, + existingStashIDs: existingStashIDs, + }, + StudioPartial{ + ID: id, + StashIDs: &UpdateStashIDs{ + StashIDs: []StashID{{ + Endpoint: existingEndpoint, + StashID: remoteSiteID, + }}, + Mode: RelationshipUpdateModeSet, + }, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + s := tt.o + got := s.ToPartial(tt.args.id, tt.args.endpoint, tt.args.excluded, tt.args.existingStashIDs) + + // unset updatedAt - we don't need to compare it + got.UpdatedAt = OptionalTime{} + + assert.Equal(t, tt.want, got) + }) + } +} diff --git a/pkg/models/model_studio.go b/pkg/models/model_studio.go index e6e8b7b205c..0f4a09bc202 100644 --- a/pkg/models/model_studio.go +++ b/pkg/models/model_studio.go @@ -19,6 +19,7 @@ type Studio struct { IgnoreAutoTag bool `json:"ignore_auto_tag"` Aliases RelatedStrings `json:"aliases"` + TagIDs RelatedIDs `json:"tag_ids"` StashIDs RelatedStashIDs `json:"stash_ids"` } @@ -45,6 +46,7 @@ type StudioPartial struct { 
IgnoreAutoTag OptionalBool Aliases *UpdateStrings + TagIDs *UpdateIDs StashIDs *UpdateStashIDs } @@ -61,6 +63,12 @@ func (s *Studio) LoadAliases(ctx context.Context, l AliasLoader) error { }) } +func (s *Studio) LoadTagIDs(ctx context.Context, l TagIDLoader) error { + return s.TagIDs.load(func() ([]int, error) { + return l.GetTagIDs(ctx, s.ID) + }) +} + func (s *Studio) LoadStashIDs(ctx context.Context, l StashIDLoader) error { return s.StashIDs.load(func() ([]StashID, error) { return l.GetStashIDs(ctx, s.ID) @@ -72,6 +80,10 @@ func (s *Studio) LoadRelationships(ctx context.Context, l PerformerReader) error return err } + if err := s.LoadTagIDs(ctx, l); err != nil { + return err + } + if err := s.LoadStashIDs(ctx, l); err != nil { return err } diff --git a/pkg/models/model_tag.go b/pkg/models/model_tag.go index 04f5ac1a2ec..e8a797e8760 100644 --- a/pkg/models/model_tag.go +++ b/pkg/models/model_tag.go @@ -1,6 +1,7 @@ package models import ( + "context" "time" ) @@ -12,6 +13,10 @@ type Tag struct { IgnoreAutoTag bool `json:"ignore_auto_tag"` CreatedAt time.Time `json:"created_at"` UpdatedAt time.Time `json:"updated_at"` + + Aliases RelatedStrings `json:"aliases"` + ParentIDs RelatedIDs `json:"parent_ids"` + ChildIDs RelatedIDs `json:"tag_ids"` } func NewTag() Tag { @@ -22,6 +27,24 @@ func NewTag() Tag { } } +func (s *Tag) LoadAliases(ctx context.Context, l AliasLoader) error { + return s.Aliases.load(func() ([]string, error) { + return l.GetAliases(ctx, s.ID) + }) +} + +func (s *Tag) LoadParentIDs(ctx context.Context, l TagRelationLoader) error { + return s.ParentIDs.load(func() ([]int, error) { + return l.GetParentIDs(ctx, s.ID) + }) +} + +func (s *Tag) LoadChildIDs(ctx context.Context, l TagRelationLoader) error { + return s.ChildIDs.load(func() ([]int, error) { + return l.GetChildIDs(ctx, s.ID) + }) +} + type TagPartial struct { Name OptionalString Description OptionalString @@ -29,6 +52,10 @@ type TagPartial struct { IgnoreAutoTag OptionalBool CreatedAt 
OptionalTime UpdatedAt OptionalTime + + Aliases *UpdateStrings + ParentIDs *UpdateIDs + ChildIDs *UpdateIDs } func NewTagPartial() TagPartial { diff --git a/pkg/models/paths/paths.go b/pkg/models/paths/paths.go index ed35bca56ba..da72111cffb 100644 --- a/pkg/models/paths/paths.go +++ b/pkg/models/paths/paths.go @@ -1,3 +1,4 @@ +// Package paths provides functions to return paths to various resources. package paths import ( diff --git a/pkg/models/paths/paths_json.go b/pkg/models/paths/paths_json.go index 7f05027c40f..e6e302238f5 100644 --- a/pkg/models/paths/paths_json.go +++ b/pkg/models/paths/paths_json.go @@ -18,7 +18,7 @@ type JSONPaths struct { Galleries string Studios string Tags string - Movies string + Groups string Files string } @@ -31,7 +31,7 @@ func newJSONPaths(baseDir string) *JSONPaths { jp.Images = filepath.Join(baseDir, "images") jp.Galleries = filepath.Join(baseDir, "galleries") jp.Studios = filepath.Join(baseDir, "studios") - jp.Movies = filepath.Join(baseDir, "movies") + jp.Groups = filepath.Join(baseDir, "movies") jp.Tags = filepath.Join(baseDir, "tags") jp.Files = filepath.Join(baseDir, "files") return &jp @@ -49,7 +49,7 @@ func EmptyJSONDirs(baseDir string) { _ = fsutil.EmptyDir(jsonPaths.Galleries) _ = fsutil.EmptyDir(jsonPaths.Performers) _ = fsutil.EmptyDir(jsonPaths.Studios) - _ = fsutil.EmptyDir(jsonPaths.Movies) + _ = fsutil.EmptyDir(jsonPaths.Groups) _ = fsutil.EmptyDir(jsonPaths.Tags) _ = fsutil.EmptyDir(jsonPaths.Files) } @@ -74,8 +74,8 @@ func EnsureJSONDirs(baseDir string) { if err := fsutil.EnsureDir(jsonPaths.Studios); err != nil { logger.Warnf("couldn't create directories for Studios: %v", err) } - if err := fsutil.EnsureDir(jsonPaths.Movies); err != nil { - logger.Warnf("couldn't create directories for Movies: %v", err) + if err := fsutil.EnsureDir(jsonPaths.Groups); err != nil { + logger.Warnf("couldn't create directories for Groups: %v", err) } if err := fsutil.EnsureDir(jsonPaths.Tags); err != nil { logger.Warnf("couldn't 
create directories for Tags: %v", err) diff --git a/pkg/models/performer.go b/pkg/models/performer.go index 9f5b1b51f49..b14f60044be 100644 --- a/pkg/models/performer.go +++ b/pkg/models/performer.go @@ -108,9 +108,7 @@ type CircumcisionCriterionInput struct { } type PerformerFilterType struct { - And *PerformerFilterType `json:"AND"` - Or *PerformerFilterType `json:"OR"` - Not *PerformerFilterType `json:"NOT"` + OperatorFilter[PerformerFilterType] Name *StringCriterionInput `json:"name"` Disambiguation *StringCriterionInput `json:"disambiguation"` Details *StringCriterionInput `json:"details"` @@ -188,6 +186,14 @@ type PerformerFilterType struct { Birthdate *DateCriterionInput `json:"birth_date"` // Filter by death date DeathDate *DateCriterionInput `json:"death_date"` + // Filter by related scenes that meet this criteria + ScenesFilter *SceneFilterType `json:"scenes_filter"` + // Filter by related images that meet this criteria + ImagesFilter *ImageFilterType `json:"images_filter"` + // Filter by related galleries that meet this criteria + GalleriesFilter *GalleryFilterType `json:"galleries_filter"` + // Filter by related tags that meet this criteria + TagsFilter *TagFilterType `json:"tags_filter"` // Filter by created at CreatedAt *TimestampCriterionInput `json:"created_at"` // Filter by updated at @@ -197,7 +203,8 @@ type PerformerFilterType struct { type PerformerCreateInput struct { Name string `json:"name"` Disambiguation *string `json:"disambiguation"` - URL *string `json:"url"` + URL *string `json:"url"` // deprecated + Urls []string `json:"urls"` Gender *GenderEnum `json:"gender"` Birthdate *string `json:"birthdate"` Ethnicity *string `json:"ethnicity"` @@ -214,8 +221,8 @@ type PerformerCreateInput struct { Piercings *string `json:"piercings"` Aliases *string `json:"aliases"` AliasList []string `json:"alias_list"` - Twitter *string `json:"twitter"` - Instagram *string `json:"instagram"` + Twitter *string `json:"twitter"` // deprecated + Instagram *string 
`json:"instagram"` // deprecated Favorite *bool `json:"favorite"` TagIds []string `json:"tag_ids"` // This should be a URL or a base64 encoded data URL @@ -233,7 +240,8 @@ type PerformerUpdateInput struct { ID string `json:"id"` Name *string `json:"name"` Disambiguation *string `json:"disambiguation"` - URL *string `json:"url"` + URL *string `json:"url"` // deprecated + Urls []string `json:"urls"` Gender *GenderEnum `json:"gender"` Birthdate *string `json:"birthdate"` Ethnicity *string `json:"ethnicity"` @@ -250,8 +258,8 @@ type PerformerUpdateInput struct { Piercings *string `json:"piercings"` Aliases *string `json:"aliases"` AliasList []string `json:"alias_list"` - Twitter *string `json:"twitter"` - Instagram *string `json:"instagram"` + Twitter *string `json:"twitter"` // deprecated + Instagram *string `json:"instagram"` // deprecated Favorite *bool `json:"favorite"` TagIds []string `json:"tag_ids"` // This should be a URL or a base64 encoded data URL diff --git a/pkg/models/relationships.go b/pkg/models/relationships.go index 29772890f04..5495f858b17 100644 --- a/pkg/models/relationships.go +++ b/pkg/models/relationships.go @@ -2,6 +2,8 @@ package models import ( "context" + + "github.com/stashapp/stash/pkg/sliceutil" ) type SceneIDLoader interface { @@ -24,12 +26,25 @@ type TagIDLoader interface { GetTagIDs(ctx context.Context, relatedID int) ([]int, error) } +type TagRelationLoader interface { + GetParentIDs(ctx context.Context, relatedID int) ([]int, error) + GetChildIDs(ctx context.Context, relatedID int) ([]int, error) +} + type FileIDLoader interface { GetManyFileIDs(ctx context.Context, ids []int) ([][]FileID, error) } -type SceneMovieLoader interface { - GetMovies(ctx context.Context, id int) ([]MoviesScenes, error) +type SceneGroupLoader interface { + GetGroups(ctx context.Context, id int) ([]GroupsScenes, error) +} + +type ContainingGroupLoader interface { + GetContainingGroupDescriptions(ctx context.Context, id int) ([]GroupIDDescription, error) +} + 
+type SubGroupLoader interface { + GetSubGroupDescriptions(ctx context.Context, id int) ([]GroupIDDescription, error) } type StashIDLoader interface { @@ -110,50 +125,126 @@ func (r *RelatedIDs) load(fn func() ([]int, error)) error { return nil } -// RelatedMovies represents a list of related Movies. -type RelatedMovies struct { - list []MoviesScenes +// RelatedGroups represents a list of related Groups. +type RelatedGroups struct { + list []GroupsScenes +} + +// NewRelatedGroups returns a loaded RelateGroups object with the provided groups. +// Loaded will return true when called on the returned object if the provided slice is not nil. +func NewRelatedGroups(list []GroupsScenes) RelatedGroups { + return RelatedGroups{ + list: list, + } +} + +// Loaded returns true if the relationship has been loaded. +func (r RelatedGroups) Loaded() bool { + return r.list != nil +} + +func (r RelatedGroups) mustLoaded() { + if !r.Loaded() { + panic("list has not been loaded") + } +} + +// List returns the related Groups. Panics if the relationship has not been loaded. +func (r RelatedGroups) List() []GroupsScenes { + r.mustLoaded() + + return r.list +} + +// Add adds the provided ids to the list. Panics if the relationship has not been loaded. +func (r *RelatedGroups) Add(groups ...GroupsScenes) { + r.mustLoaded() + + r.list = append(r.list, groups...) +} + +// ForID returns the GroupsScenes object for the given group ID. Returns nil if not found. 
+func (r *RelatedGroups) ForID(id int) *GroupsScenes { + r.mustLoaded() + + for _, v := range r.list { + if v.GroupID == id { + return &v + } + } + + return nil +} + +func (r *RelatedGroups) load(fn func() ([]GroupsScenes, error)) error { + if r.Loaded() { + return nil + } + + ids, err := fn() + if err != nil { + return err + } + + if ids == nil { + ids = []GroupsScenes{} + } + + r.list = ids + + return nil +} + +type RelatedGroupDescriptions struct { + list []GroupIDDescription } -// NewRelatedMovies returns a loaded RelatedMovies object with the provided movies. +// NewRelatedGroups returns a loaded RelateGroups object with the provided groups. // Loaded will return true when called on the returned object if the provided slice is not nil. -func NewRelatedMovies(list []MoviesScenes) RelatedMovies { - return RelatedMovies{ +func NewRelatedGroupDescriptions(list []GroupIDDescription) RelatedGroupDescriptions { + return RelatedGroupDescriptions{ list: list, } } // Loaded returns true if the relationship has been loaded. -func (r RelatedMovies) Loaded() bool { +func (r RelatedGroupDescriptions) Loaded() bool { return r.list != nil } -func (r RelatedMovies) mustLoaded() { +func (r RelatedGroupDescriptions) mustLoaded() { if !r.Loaded() { panic("list has not been loaded") } } -// List returns the related Movies. Panics if the relationship has not been loaded. -func (r RelatedMovies) List() []MoviesScenes { +// List returns the related Groups. Panics if the relationship has not been loaded. +func (r RelatedGroupDescriptions) List() []GroupIDDescription { r.mustLoaded() return r.list } +// List returns the related Groups. Panics if the relationship has not been loaded. +func (r RelatedGroupDescriptions) IDs() []int { + r.mustLoaded() + + return sliceutil.Map(r.list, func(d GroupIDDescription) int { return d.GroupID }) +} + // Add adds the provided ids to the list. Panics if the relationship has not been loaded. 
-func (r *RelatedMovies) Add(movies ...MoviesScenes) { +func (r *RelatedGroupDescriptions) Add(groups ...GroupIDDescription) { r.mustLoaded() - r.list = append(r.list, movies...) + r.list = append(r.list, groups...) } -// ForID returns the MoviesScenes object for the given movie ID. Returns nil if not found. -func (r *RelatedMovies) ForID(id int) *MoviesScenes { +// ForID returns the GroupsScenes object for the given group ID. Returns nil if not found. +func (r *RelatedGroupDescriptions) ForID(id int) *GroupIDDescription { r.mustLoaded() for _, v := range r.list { - if v.MovieID == id { + if v.GroupID == id { return &v } } @@ -161,7 +252,7 @@ func (r *RelatedMovies) ForID(id int) *MoviesScenes { return nil } -func (r *RelatedMovies) load(fn func() ([]MoviesScenes, error)) error { +func (r *RelatedGroupDescriptions) load(fn func() ([]GroupIDDescription, error)) error { if r.Loaded() { return nil } @@ -172,7 +263,7 @@ func (r *RelatedMovies) load(fn func() ([]MoviesScenes, error)) error { } if ids == nil { - ids = []MoviesScenes{} + ids = []GroupIDDescription{} } r.list = ids diff --git a/pkg/models/repository.go b/pkg/models/repository.go index 3eb9a03d378..9bd1e8cad44 100644 --- a/pkg/models/repository.go +++ b/pkg/models/repository.go @@ -20,7 +20,7 @@ type Repository struct { Gallery GalleryReaderWriter GalleryChapter GalleryChapterReaderWriter Image ImageReaderWriter - Movie MovieReaderWriter + Group GroupReaderWriter Performer PerformerReaderWriter Scene SceneReaderWriter SceneMarker SceneMarkerReaderWriter diff --git a/pkg/models/repository_gallery.go b/pkg/models/repository_gallery.go index 45ad5beb710..0cfb9964fab 100644 --- a/pkg/models/repository_gallery.go +++ b/pkg/models/repository_gallery.go @@ -83,6 +83,8 @@ type GalleryWriter interface { AddFileID(ctx context.Context, id int, fileID FileID) error AddImages(ctx context.Context, galleryID int, imageIDs ...int) error RemoveImages(ctx context.Context, galleryID int, imageIDs ...int) error + SetCover(ctx 
context.Context, galleryID int, coverImageID int) error + ResetCover(ctx context.Context, galleryID int) error } // GalleryReaderWriter provides all gallery methods. diff --git a/pkg/models/repository_group.go b/pkg/models/repository_group.go new file mode 100644 index 00000000000..704390d77b3 --- /dev/null +++ b/pkg/models/repository_group.go @@ -0,0 +1,90 @@ +package models + +import "context" + +// GroupGetter provides methods to get groups by ID. +type GroupGetter interface { + // TODO - rename this to Find and remove existing method + FindMany(ctx context.Context, ids []int) ([]*Group, error) + Find(ctx context.Context, id int) (*Group, error) +} + +// GroupFinder provides methods to find groups. +type GroupFinder interface { + GroupGetter + FindByPerformerID(ctx context.Context, performerID int) ([]*Group, error) + FindByStudioID(ctx context.Context, studioID int) ([]*Group, error) + FindByName(ctx context.Context, name string, nocase bool) (*Group, error) + FindByNames(ctx context.Context, names []string, nocase bool) ([]*Group, error) +} + +// GroupQueryer provides methods to query groups. +type GroupQueryer interface { + Query(ctx context.Context, groupFilter *GroupFilterType, findFilter *FindFilterType) ([]*Group, int, error) + QueryCount(ctx context.Context, groupFilter *GroupFilterType, findFilter *FindFilterType) (int, error) +} + +// GroupCounter provides methods to count groups. +type GroupCounter interface { + Count(ctx context.Context) (int, error) + CountByPerformerID(ctx context.Context, performerID int) (int, error) + CountByStudioID(ctx context.Context, studioID int) (int, error) +} + +// GroupCreator provides methods to create groups. +type GroupCreator interface { + Create(ctx context.Context, newGroup *Group) error +} + +// GroupUpdater provides methods to update groups. 
+type GroupUpdater interface { + Update(ctx context.Context, updatedGroup *Group) error + UpdatePartial(ctx context.Context, id int, updatedGroup GroupPartial) (*Group, error) + UpdateFrontImage(ctx context.Context, groupID int, frontImage []byte) error + UpdateBackImage(ctx context.Context, groupID int, backImage []byte) error +} + +// GroupDestroyer provides methods to destroy groups. +type GroupDestroyer interface { + Destroy(ctx context.Context, id int) error +} + +type GroupCreatorUpdater interface { + GroupCreator + GroupUpdater +} + +type GroupFinderCreator interface { + GroupFinder + GroupCreator +} + +// GroupReader provides all methods to read groups. +type GroupReader interface { + GroupFinder + GroupQueryer + GroupCounter + URLLoader + TagIDLoader + ContainingGroupLoader + SubGroupLoader + + All(ctx context.Context) ([]*Group, error) + GetFrontImage(ctx context.Context, groupID int) ([]byte, error) + HasFrontImage(ctx context.Context, groupID int) (bool, error) + GetBackImage(ctx context.Context, groupID int) ([]byte, error) + HasBackImage(ctx context.Context, groupID int) (bool, error) +} + +// GroupWriter provides all methods to modify groups. +type GroupWriter interface { + GroupCreator + GroupUpdater + GroupDestroyer +} + +// GroupReaderWriter provides all group methods. +type GroupReaderWriter interface { + GroupReader + GroupWriter +} diff --git a/pkg/models/repository_image.go b/pkg/models/repository_image.go index ead05105b45..1d42a84ff6d 100644 --- a/pkg/models/repository_image.go +++ b/pkg/models/repository_image.go @@ -18,6 +18,7 @@ type ImageFinder interface { FindByFolderID(ctx context.Context, fileID FolderID) ([]*Image, error) FindByZipFileID(ctx context.Context, zipFileID FileID) ([]*Image, error) FindByGalleryID(ctx context.Context, galleryID int) ([]*Image, error) + FindByGalleryIDIndex(ctx context.Context, galleryID int, index uint) (*Image, error) } // ImageQueryer provides methods to query images. 
@@ -26,6 +27,10 @@ type ImageQueryer interface { QueryCount(ctx context.Context, imageFilter *ImageFilterType, findFilter *FindFilterType) (int, error) } +type GalleryCoverFinder interface { + CoverByGalleryID(ctx context.Context, galleryId int) (*Image, error) +} + // ImageCounter provides methods to count images. type ImageCounter interface { Count(ctx context.Context) (int, error) @@ -71,6 +76,8 @@ type ImageReader interface { TagIDLoader FileLoader + GalleryCoverFinder + All(ctx context.Context) ([]*Image, error) Size(ctx context.Context) (float64, error) } diff --git a/pkg/models/repository_movie.go b/pkg/models/repository_movie.go deleted file mode 100644 index 9234ea7a5d1..00000000000 --- a/pkg/models/repository_movie.go +++ /dev/null @@ -1,86 +0,0 @@ -package models - -import "context" - -// MovieGetter provides methods to get movies by ID. -type MovieGetter interface { - // TODO - rename this to Find and remove existing method - FindMany(ctx context.Context, ids []int) ([]*Movie, error) - Find(ctx context.Context, id int) (*Movie, error) -} - -// MovieFinder provides methods to find movies. -type MovieFinder interface { - MovieGetter - FindByPerformerID(ctx context.Context, performerID int) ([]*Movie, error) - FindByStudioID(ctx context.Context, studioID int) ([]*Movie, error) - FindByName(ctx context.Context, name string, nocase bool) (*Movie, error) - FindByNames(ctx context.Context, names []string, nocase bool) ([]*Movie, error) -} - -// MovieQueryer provides methods to query movies. -type MovieQueryer interface { - Query(ctx context.Context, movieFilter *MovieFilterType, findFilter *FindFilterType) ([]*Movie, int, error) - QueryCount(ctx context.Context, movieFilter *MovieFilterType, findFilter *FindFilterType) (int, error) -} - -// MovieCounter provides methods to count movies. 
-type MovieCounter interface { - Count(ctx context.Context) (int, error) - CountByPerformerID(ctx context.Context, performerID int) (int, error) - CountByStudioID(ctx context.Context, studioID int) (int, error) -} - -// MovieCreator provides methods to create movies. -type MovieCreator interface { - Create(ctx context.Context, newMovie *Movie) error -} - -// MovieUpdater provides methods to update movies. -type MovieUpdater interface { - Update(ctx context.Context, updatedMovie *Movie) error - UpdatePartial(ctx context.Context, id int, updatedMovie MoviePartial) (*Movie, error) - UpdateFrontImage(ctx context.Context, movieID int, frontImage []byte) error - UpdateBackImage(ctx context.Context, movieID int, backImage []byte) error -} - -// MovieDestroyer provides methods to destroy movies. -type MovieDestroyer interface { - Destroy(ctx context.Context, id int) error -} - -type MovieCreatorUpdater interface { - MovieCreator - MovieUpdater -} - -type MovieFinderCreator interface { - MovieFinder - MovieCreator -} - -// MovieReader provides all methods to read movies. -type MovieReader interface { - MovieFinder - MovieQueryer - MovieCounter - - All(ctx context.Context) ([]*Movie, error) - GetFrontImage(ctx context.Context, movieID int) ([]byte, error) - HasFrontImage(ctx context.Context, movieID int) (bool, error) - GetBackImage(ctx context.Context, movieID int) ([]byte, error) - HasBackImage(ctx context.Context, movieID int) (bool, error) -} - -// MovieWriter provides all methods to modify movies. -type MovieWriter interface { - MovieCreator - MovieUpdater - MovieDestroyer -} - -// MovieReaderWriter provides all movie methods. 
-type MovieReaderWriter interface { - MovieReader - MovieWriter -} diff --git a/pkg/models/repository_performer.go b/pkg/models/repository_performer.go index 22ade1d1d7d..3fd93619011 100644 --- a/pkg/models/repository_performer.go +++ b/pkg/models/repository_performer.go @@ -78,6 +78,7 @@ type PerformerReader interface { AliasLoader StashIDLoader TagIDLoader + URLLoader All(ctx context.Context) ([]*Performer, error) GetImage(ctx context.Context, performerID int) ([]byte, error) diff --git a/pkg/models/repository_scene.go b/pkg/models/repository_scene.go index bc01ca691d2..e28347c5b82 100644 --- a/pkg/models/repository_scene.go +++ b/pkg/models/repository_scene.go @@ -23,7 +23,7 @@ type SceneFinder interface { FindByPrimaryFileID(ctx context.Context, fileID FileID) ([]*Scene, error) FindByPerformerID(ctx context.Context, performerID int) ([]*Scene, error) FindByGalleryID(ctx context.Context, performerID int) ([]*Scene, error) - FindByMovieID(ctx context.Context, movieID int) ([]*Scene, error) + FindByGroupID(ctx context.Context, groupID int) ([]*Scene, error) FindDuplicates(ctx context.Context, distance int, durationDiff float64) ([][]*Scene, error) } @@ -37,10 +37,7 @@ type SceneQueryer interface { type SceneCounter interface { Count(ctx context.Context) (int, error) CountByPerformerID(ctx context.Context, performerID int) (int, error) - CountByMovieID(ctx context.Context, movieID int) (int, error) CountByFileID(ctx context.Context, fileID FileID) (int, error) - CountByStudioID(ctx context.Context, studioID int) (int, error) - CountByTagID(ctx context.Context, tagID int) (int, error) CountMissingChecksum(ctx context.Context) (int, error) CountMissingOSHash(ctx context.Context) (int, error) OCountByPerformerID(ctx context.Context, performerID int) (int, error) @@ -99,7 +96,7 @@ type SceneReader interface { GalleryIDLoader PerformerIDLoader TagIDLoader - SceneMovieLoader + SceneGroupLoader StashIDLoader VideoFileLoader @@ -137,6 +134,7 @@ type SceneWriter interface { 
OHistoryWriter ViewHistoryWriter SaveActivity(ctx context.Context, sceneID int, resumeTime *float64, playDuration *float64) (bool, error) + ResetActivity(ctx context.Context, sceneID int, resetResume bool, resetDuration bool) (bool, error) } // SceneReaderWriter provides all scene methods. diff --git a/pkg/models/repository_studio.go b/pkg/models/repository_studio.go index 272bf8fed23..a2b9202f303 100644 --- a/pkg/models/repository_studio.go +++ b/pkg/models/repository_studio.go @@ -22,6 +22,7 @@ type StudioFinder interface { // StudioQueryer provides methods to query studios. type StudioQueryer interface { Query(ctx context.Context, studioFilter *StudioFilterType, findFilter *FindFilterType) ([]*Studio, int, error) + QueryCount(ctx context.Context, studioFilter *StudioFilterType, findFilter *FindFilterType) (int, error) } type StudioAutoTagQueryer interface { @@ -36,6 +37,7 @@ type StudioAutoTagQueryer interface { // StudioCounter provides methods to count studios. type StudioCounter interface { Count(ctx context.Context) (int, error) + CountByTagID(ctx context.Context, tagID int) (int, error) } // StudioCreator provides methods to create studios. 
@@ -74,6 +76,7 @@ type StudioReader interface { AliasLoader StashIDLoader + TagIDLoader All(ctx context.Context) ([]*Studio, error) GetImage(ctx context.Context, studioID int) ([]byte, error) diff --git a/pkg/models/repository_tag.go b/pkg/models/repository_tag.go index ca8f6971bf7..2b073cae02e 100644 --- a/pkg/models/repository_tag.go +++ b/pkg/models/repository_tag.go @@ -20,7 +20,9 @@ type TagFinder interface { FindByImageID(ctx context.Context, imageID int) ([]*Tag, error) FindByGalleryID(ctx context.Context, galleryID int) ([]*Tag, error) FindByPerformerID(ctx context.Context, performerID int) ([]*Tag, error) + FindByGroupID(ctx context.Context, groupID int) ([]*Tag, error) FindBySceneMarkerID(ctx context.Context, sceneMarkerID int) ([]*Tag, error) + FindByStudioID(ctx context.Context, studioID int) ([]*Tag, error) FindByName(ctx context.Context, name string, nocase bool) (*Tag, error) FindByNames(ctx context.Context, names []string, nocase bool) ([]*Tag, error) } @@ -84,6 +86,7 @@ type TagReader interface { TagCounter AliasLoader + TagRelationLoader All(ctx context.Context) ([]*Tag, error) GetImage(ctx context.Context, tagID int) ([]byte, error) diff --git a/pkg/models/saved_filter.go b/pkg/models/saved_filter.go index a8e4f20c330..919f0a1a6c8 100644 --- a/pkg/models/saved_filter.go +++ b/pkg/models/saved_filter.go @@ -7,13 +7,11 @@ type SavedFilterReader interface { Find(ctx context.Context, id int) (*SavedFilter, error) FindMany(ctx context.Context, ids []int, ignoreNotFound bool) ([]*SavedFilter, error) FindByMode(ctx context.Context, mode FilterMode) ([]*SavedFilter, error) - FindDefault(ctx context.Context, mode FilterMode) (*SavedFilter, error) } type SavedFilterWriter interface { Create(ctx context.Context, obj *SavedFilter) error Update(ctx context.Context, obj *SavedFilter) error - SetDefault(ctx context.Context, obj *SavedFilter) error Destroy(ctx context.Context, id int) error } diff --git a/pkg/models/scene.go b/pkg/models/scene.go index 
c7a87151c6d..48317240276 100644 --- a/pkg/models/scene.go +++ b/pkg/models/scene.go @@ -9,9 +9,7 @@ type PHashDuplicationCriterionInput struct { } type SceneFilterType struct { - And *SceneFilterType `json:"AND"` - Or *SceneFilterType `json:"OR"` - Not *SceneFilterType `json:"NOT"` + OperatorFilter[SceneFilterType] ID *IntCriterionInput `json:"id"` Title *StringCriterionInput `json:"title"` Code *StringCriterionInput `json:"code"` @@ -57,6 +55,8 @@ type SceneFilterType struct { IsMissing *string `json:"is_missing"` // Filter to only include scenes with this studio Studios *HierarchicalMultiCriterionInput `json:"studios"` + // Filter to only include scenes with this group + Groups *HierarchicalMultiCriterionInput `json:"groups"` // Filter to only include scenes with this movie Movies *MultiCriterionInput `json:"movies"` // Filter to only include scenes with this gallery @@ -97,6 +97,20 @@ type SceneFilterType struct { LastPlayedAt *TimestampCriterionInput `json:"last_played_at"` // Filter by date Date *DateCriterionInput `json:"date"` + // Filter by related galleries that meet this criteria + GalleriesFilter *GalleryFilterType `json:"galleries_filter"` + // Filter by related performers that meet this criteria + PerformersFilter *PerformerFilterType `json:"performers_filter"` + // Filter by related studios that meet this criteria + StudiosFilter *StudioFilterType `json:"studios_filter"` + // Filter by related tags that meet this criteria + TagsFilter *TagFilterType `json:"tags_filter"` + // Filter by related groups that meet this criteria + GroupsFilter *GroupFilterType `json:"groups_filter"` + // Filter by related movies that meet this criteria + MoviesFilter *GroupFilterType `json:"movies_filter"` + // Filter by related markers that meet this criteria + MarkersFilter *SceneMarkerFilterType `json:"markers_filter"` // Filter by created at CreatedAt *TimestampCriterionInput `json:"created_at"` // Filter by updated at @@ -121,11 +135,17 @@ type SceneQueryResult struct 
{ resolveErr error } +// SceneMovieInput is used for groups and movies type SceneMovieInput struct { MovieID string `json:"movie_id"` SceneIndex *int `json:"scene_index"` } +type SceneGroupInput struct { + GroupID string `json:"group_id"` + SceneIndex *int `json:"scene_index"` +} + type SceneCreateInput struct { Title *string `json:"title"` Code *string `json:"code"` @@ -140,6 +160,7 @@ type SceneCreateInput struct { GalleryIds []string `json:"gallery_ids"` PerformerIds []string `json:"performer_ids"` Movies []SceneMovieInput `json:"movies"` + Groups []SceneGroupInput `json:"groups"` TagIds []string `json:"tag_ids"` // This should be a URL or a base64 encoded data URL CoverImage *string `json:"cover_image"` @@ -167,6 +188,7 @@ type SceneUpdateInput struct { GalleryIds []string `json:"gallery_ids"` PerformerIds []string `json:"performer_ids"` Movies []SceneMovieInput `json:"movies"` + Groups []SceneGroupInput `json:"groups"` TagIds []string `json:"tag_ids"` // This should be a URL or a base64 encoded data URL CoverImage *string `json:"cover_image"` diff --git a/pkg/models/scene_marker.go b/pkg/models/scene_marker.go index 4a10c0e2178..8c4598a6df4 100644 --- a/pkg/models/scene_marker.go +++ b/pkg/models/scene_marker.go @@ -9,6 +9,8 @@ type SceneMarkerFilterType struct { SceneTags *HierarchicalMultiCriterionInput `json:"scene_tags"` // Filter to only include scene markers with these performers Performers *MultiCriterionInput `json:"performers"` + // Filter to only include scene markers from these scenes + Scenes *MultiCriterionInput `json:"scenes"` // Filter by created at CreatedAt *TimestampCriterionInput `json:"created_at"` // Filter by updated at @@ -19,6 +21,8 @@ type SceneMarkerFilterType struct { SceneCreatedAt *TimestampCriterionInput `json:"scene_created_at"` // Filter by scenes updated at SceneUpdatedAt *TimestampCriterionInput `json:"scene_updated_at"` + // Filter by related scenes that meet this criteria + SceneFilter *SceneFilterType `json:"scene_filter"` 
} type MarkerStringsResultType struct { diff --git a/pkg/models/studio.go b/pkg/models/studio.go index 9cc6b907e1d..d5575b7ad3b 100644 --- a/pkg/models/studio.go +++ b/pkg/models/studio.go @@ -1,9 +1,7 @@ package models type StudioFilterType struct { - And *StudioFilterType `json:"AND"` - Or *StudioFilterType `json:"OR"` - Not *StudioFilterType `json:"NOT"` + OperatorFilter[StudioFilterType] Name *StringCriterionInput `json:"name"` Details *StringCriterionInput `json:"details"` // Filter to only include studios with this parent studio @@ -16,6 +14,10 @@ type StudioFilterType struct { IsMissing *string `json:"is_missing"` // Filter by rating expressed as 1-100 Rating100 *IntCriterionInput `json:"rating100"` + // Filter to only include studios with these tags + Tags *HierarchicalMultiCriterionInput `json:"tags"` + // Filter by tag count + TagCount *IntCriterionInput `json:"tag_count"` // Filter by favorite Favorite *bool `json:"favorite"` // Filter by scene count @@ -32,6 +34,12 @@ type StudioFilterType struct { ChildCount *IntCriterionInput `json:"child_count"` // Filter by autotag ignore value IgnoreAutoTag *bool `json:"ignore_auto_tag"` + // Filter by related scenes that meet this criteria + ScenesFilter *SceneFilterType `json:"scenes_filter"` + // Filter by related images that meet this criteria + ImagesFilter *ImageFilterType `json:"images_filter"` + // Filter by related galleries that meet this criteria + GalleriesFilter *GalleryFilterType `json:"galleries_filter"` // Filter by created at CreatedAt *TimestampCriterionInput `json:"created_at"` // Filter by updated at @@ -49,6 +57,7 @@ type StudioCreateInput struct { Favorite *bool `json:"favorite"` Details *string `json:"details"` Aliases []string `json:"aliases"` + TagIds []string `json:"tag_ids"` IgnoreAutoTag *bool `json:"ignore_auto_tag"` } @@ -64,5 +73,6 @@ type StudioUpdateInput struct { Favorite *bool `json:"favorite"` Details *string `json:"details"` Aliases []string `json:"aliases"` + TagIds []string 
`json:"tag_ids"` IgnoreAutoTag *bool `json:"ignore_auto_tag"` } diff --git a/pkg/models/tag.go b/pkg/models/tag.go index 710d1953e99..ddab8baf5d6 100644 --- a/pkg/models/tag.go +++ b/pkg/models/tag.go @@ -1,9 +1,7 @@ package models type TagFilterType struct { - And *TagFilterType `json:"AND"` - Or *TagFilterType `json:"OR"` - Not *TagFilterType `json:"NOT"` + OperatorFilter[TagFilterType] // Filter by tag name Name *StringCriterionInput `json:"name"` // Filter by tag aliases @@ -22,6 +20,12 @@ type TagFilterType struct { GalleryCount *IntCriterionInput `json:"gallery_count"` // Filter by number of performers with this tag PerformerCount *IntCriterionInput `json:"performer_count"` + // Filter by number of studios with this tag + StudioCount *IntCriterionInput `json:"studio_count"` + // Filter by number of groups with this tag + GroupCount *IntCriterionInput `json:"group_count"` + // Filter by number of movies with this tag + MovieCount *IntCriterionInput `json:"movie_count"` // Filter by number of markers with this tag MarkerCount *IntCriterionInput `json:"marker_count"` // Filter by parent tags @@ -34,6 +38,12 @@ type TagFilterType struct { ChildCount *IntCriterionInput `json:"child_count"` // Filter by autotag ignore value IgnoreAutoTag *bool `json:"ignore_auto_tag"` + // Filter by related scenes that meet this criteria + ScenesFilter *SceneFilterType `json:"scenes_filter"` + // Filter by related images that meet this criteria + ImagesFilter *ImageFilterType `json:"images_filter"` + // Filter by related galleries that meet this criteria + GalleriesFilter *GalleryFilterType `json:"galleries_filter"` // Filter by created at CreatedAt *TimestampCriterionInput `json:"created_at"` // Filter by updated at diff --git a/pkg/models/update.go b/pkg/models/update.go index 2302a2e699a..6aaff8c317f 100644 --- a/pkg/models/update.go +++ b/pkg/models/update.go @@ -133,3 +133,68 @@ func applyUpdate[T comparable](values []T, mode RelationshipUpdateMode, existing return nil } + 
+type UpdateGroupDescriptions struct { + Groups []GroupIDDescription `json:"groups"` + Mode RelationshipUpdateMode `json:"mode"` +} + +// Apply applies the update to a list of existing ids, returning the result. +func (u *UpdateGroupDescriptions) Apply(existing []GroupIDDescription) []GroupIDDescription { + if u == nil { + return existing + } + + switch u.Mode { + case RelationshipUpdateModeAdd: + return u.applyAdd(existing) + case RelationshipUpdateModeRemove: + return u.applyRemove(existing) + case RelationshipUpdateModeSet: + return u.Groups + } + + return nil +} + +func (u *UpdateGroupDescriptions) applyAdd(existing []GroupIDDescription) []GroupIDDescription { + // overwrite any existing values with the same id + ret := append([]GroupIDDescription{}, existing...) + for _, v := range u.Groups { + found := false + for i, vv := range ret { + if vv.GroupID == v.GroupID { + ret[i] = v + found = true + break + } + } + + if !found { + ret = append(ret, v) + } + } + + return ret +} + +func (u *UpdateGroupDescriptions) applyRemove(existing []GroupIDDescription) []GroupIDDescription { + // remove any existing values with the same id + var ret []GroupIDDescription + for _, v := range existing { + found := false + for _, vv := range u.Groups { + if vv.GroupID == v.GroupID { + found = true + break + } + } + + // if not found in the remove list, keep it + if !found { + ret = append(ret, v) + } + } + + return ret +} diff --git a/pkg/movie/import.go b/pkg/movie/import.go deleted file mode 100644 index 8004798ae53..00000000000 --- a/pkg/movie/import.go +++ /dev/null @@ -1,177 +0,0 @@ -package movie - -import ( - "context" - "fmt" - - "github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/models/jsonschema" - "github.com/stashapp/stash/pkg/utils" -) - -type ImporterReaderWriter interface { - models.MovieCreatorUpdater - FindByName(ctx context.Context, name string, nocase bool) (*models.Movie, error) -} - -type Importer struct { - ReaderWriter 
ImporterReaderWriter - StudioWriter models.StudioFinderCreator - Input jsonschema.Movie - MissingRefBehaviour models.ImportMissingRefEnum - - movie models.Movie - frontImageData []byte - backImageData []byte -} - -func (i *Importer) PreImport(ctx context.Context) error { - i.movie = i.movieJSONToMovie(i.Input) - - if err := i.populateStudio(ctx); err != nil { - return err - } - - var err error - if len(i.Input.FrontImage) > 0 { - i.frontImageData, err = utils.ProcessBase64Image(i.Input.FrontImage) - if err != nil { - return fmt.Errorf("invalid front_image: %v", err) - } - } - if len(i.Input.BackImage) > 0 { - i.backImageData, err = utils.ProcessBase64Image(i.Input.BackImage) - if err != nil { - return fmt.Errorf("invalid back_image: %v", err) - } - } - - return nil -} - -func (i *Importer) movieJSONToMovie(movieJSON jsonschema.Movie) models.Movie { - newMovie := models.Movie{ - Name: movieJSON.Name, - Aliases: movieJSON.Aliases, - Director: movieJSON.Director, - Synopsis: movieJSON.Synopsis, - URL: movieJSON.URL, - CreatedAt: movieJSON.CreatedAt.GetTime(), - UpdatedAt: movieJSON.UpdatedAt.GetTime(), - } - - if movieJSON.Date != "" { - d, err := models.ParseDate(movieJSON.Date) - if err == nil { - newMovie.Date = &d - } - } - if movieJSON.Rating != 0 { - newMovie.Rating = &movieJSON.Rating - } - - if movieJSON.Duration != 0 { - newMovie.Duration = &movieJSON.Duration - } - - return newMovie -} - -func (i *Importer) populateStudio(ctx context.Context) error { - if i.Input.Studio != "" { - studio, err := i.StudioWriter.FindByName(ctx, i.Input.Studio, false) - if err != nil { - return fmt.Errorf("error finding studio by name: %v", err) - } - - if studio == nil { - if i.MissingRefBehaviour == models.ImportMissingRefEnumFail { - return fmt.Errorf("movie studio '%s' not found", i.Input.Studio) - } - - if i.MissingRefBehaviour == models.ImportMissingRefEnumIgnore { - return nil - } - - if i.MissingRefBehaviour == models.ImportMissingRefEnumCreate { - studioID, err := 
i.createStudio(ctx, i.Input.Studio) - if err != nil { - return err - } - i.movie.StudioID = &studioID - } - } else { - i.movie.StudioID = &studio.ID - } - } - - return nil -} - -func (i *Importer) createStudio(ctx context.Context, name string) (int, error) { - newStudio := models.NewStudio() - newStudio.Name = name - - err := i.StudioWriter.Create(ctx, &newStudio) - if err != nil { - return 0, err - } - - return newStudio.ID, nil -} - -func (i *Importer) PostImport(ctx context.Context, id int) error { - if len(i.frontImageData) > 0 { - if err := i.ReaderWriter.UpdateFrontImage(ctx, id, i.frontImageData); err != nil { - return fmt.Errorf("error setting movie front image: %v", err) - } - } - - if len(i.backImageData) > 0 { - if err := i.ReaderWriter.UpdateBackImage(ctx, id, i.backImageData); err != nil { - return fmt.Errorf("error setting movie back image: %v", err) - } - } - - return nil -} - -func (i *Importer) Name() string { - return i.Input.Name -} - -func (i *Importer) FindExistingID(ctx context.Context) (*int, error) { - const nocase = false - existing, err := i.ReaderWriter.FindByName(ctx, i.Name(), nocase) - if err != nil { - return nil, err - } - - if existing != nil { - id := existing.ID - return &id, nil - } - - return nil, nil -} - -func (i *Importer) Create(ctx context.Context) (*int, error) { - err := i.ReaderWriter.Create(ctx, &i.movie) - if err != nil { - return nil, fmt.Errorf("error creating movie: %v", err) - } - - id := i.movie.ID - return &id, nil -} - -func (i *Importer) Update(ctx context.Context, id int) error { - movie := i.movie - movie.ID = id - err := i.ReaderWriter.Update(ctx, &movie) - if err != nil { - return fmt.Errorf("error updating existing movie: %v", err) - } - - return nil -} diff --git a/pkg/movie/query.go b/pkg/movie/query.go deleted file mode 100644 index 3fac932a03d..00000000000 --- a/pkg/movie/query.go +++ /dev/null @@ -1,20 +0,0 @@ -package movie - -import ( - "context" - "strconv" - - 
"github.com/stashapp/stash/pkg/models" -) - -func CountByStudioID(ctx context.Context, r models.MovieQueryer, id int, depth *int) (int, error) { - filter := &models.MovieFilterType{ - Studios: &models.HierarchicalMultiCriterionInput{ - Value: []string{strconv.Itoa(id)}, - Modifier: models.CriterionModifierIncludes, - Depth: depth, - }, - } - - return r.QueryCount(ctx, filter, nil) -} diff --git a/pkg/performer/doc.go b/pkg/performer/doc.go new file mode 100644 index 00000000000..67a36f88c4e --- /dev/null +++ b/pkg/performer/doc.go @@ -0,0 +1,2 @@ +// Package performer provides the application logic for performer functionality. +package performer diff --git a/pkg/performer/export.go b/pkg/performer/export.go index 9aec8b34e56..8f720338f3d 100644 --- a/pkg/performer/export.go +++ b/pkg/performer/export.go @@ -16,6 +16,7 @@ type ImageAliasStashIDGetter interface { GetImage(ctx context.Context, performerID int) ([]byte, error) models.AliasLoader models.StashIDLoader + models.URLLoader } // ToJSON converts a Performer object into its JSON equivalent. 
@@ -23,7 +24,6 @@ func ToJSON(ctx context.Context, reader ImageAliasStashIDGetter, performer *mode newPerformerJSON := jsonschema.Performer{ Name: performer.Name, Disambiguation: performer.Disambiguation, - URL: performer.URL, Ethnicity: performer.Ethnicity, Country: performer.Country, EyeColor: performer.EyeColor, @@ -32,8 +32,6 @@ func ToJSON(ctx context.Context, reader ImageAliasStashIDGetter, performer *mode CareerLength: performer.CareerLength, Tattoos: performer.Tattoos, Piercings: performer.Piercings, - Twitter: performer.Twitter, - Instagram: performer.Instagram, Favorite: performer.Favorite, Details: performer.Details, HairColor: performer.HairColor, @@ -78,6 +76,11 @@ func ToJSON(ctx context.Context, reader ImageAliasStashIDGetter, performer *mode newPerformerJSON.Aliases = performer.Aliases.List() + if err := performer.LoadURLs(ctx, reader); err != nil { + return nil, fmt.Errorf("loading performer urls: %w", err) + } + newPerformerJSON.URLs = performer.URLs.List() + if err := performer.LoadStashIDs(ctx, reader); err != nil { return nil, fmt.Errorf("loading performer stash ids: %w", err) } diff --git a/pkg/performer/export_test.go b/pkg/performer/export_test.go index 572634aa6a7..36353b17de7 100644 --- a/pkg/performer/export_test.go +++ b/pkg/performer/export_test.go @@ -77,7 +77,7 @@ func createFullPerformer(id int, name string) *models.Performer { ID: id, Name: name, Disambiguation: disambiguation, - URL: url, + URLs: models.NewRelatedStrings([]string{url, twitter, instagram}), Aliases: models.NewRelatedStrings(aliases), Birthdate: &birthDate, CareerLength: careerLength, @@ -90,11 +90,9 @@ func createFullPerformer(id int, name string) *models.Performer { Favorite: true, Gender: &genderEnum, Height: &height, - Instagram: instagram, Measurements: measurements, Piercings: piercings, Tattoos: tattoos, - Twitter: twitter, CreatedAt: createTime, UpdatedAt: updateTime, Rating: &rating, @@ -114,6 +112,7 @@ func createEmptyPerformer(id int) models.Performer { 
CreatedAt: createTime, UpdatedAt: updateTime, Aliases: models.NewRelatedStrings([]string{}), + URLs: models.NewRelatedStrings([]string{}), TagIDs: models.NewRelatedIDs([]int{}), StashIDs: models.NewRelatedStashIDs([]models.StashID{}), } @@ -123,7 +122,7 @@ func createFullJSONPerformer(name string, image string) *jsonschema.Performer { return &jsonschema.Performer{ Name: name, Disambiguation: disambiguation, - URL: url, + URLs: []string{url, twitter, instagram}, Aliases: aliases, Birthdate: birthDate.String(), CareerLength: careerLength, @@ -136,11 +135,9 @@ func createFullJSONPerformer(name string, image string) *jsonschema.Performer { Favorite: true, Gender: gender, Height: strconv.Itoa(height), - Instagram: instagram, Measurements: measurements, Piercings: piercings, Tattoos: tattoos, - Twitter: twitter, CreatedAt: json.JSONTime{ Time: createTime, }, @@ -161,6 +158,7 @@ func createFullJSONPerformer(name string, image string) *jsonschema.Performer { func createEmptyJSONPerformer() *jsonschema.Performer { return &jsonschema.Performer{ Aliases: []string{}, + URLs: []string{}, StashIDs: []models.StashID{}, CreatedAt: json.JSONTime{ Time: createTime, diff --git a/pkg/performer/import.go b/pkg/performer/import.go index afa6cd4bca8..d50384fa3d3 100644 --- a/pkg/performer/import.go +++ b/pkg/performer/import.go @@ -188,7 +188,6 @@ func performerJSONToPerformer(performerJSON jsonschema.Performer) models.Perform newPerformer := models.Performer{ Name: performerJSON.Name, Disambiguation: performerJSON.Disambiguation, - URL: performerJSON.URL, Ethnicity: performerJSON.Ethnicity, Country: performerJSON.Country, EyeColor: performerJSON.EyeColor, @@ -198,8 +197,6 @@ func performerJSONToPerformer(performerJSON jsonschema.Performer) models.Perform Tattoos: performerJSON.Tattoos, Piercings: performerJSON.Piercings, Aliases: models.NewRelatedStrings(performerJSON.Aliases), - Twitter: performerJSON.Twitter, - Instagram: performerJSON.Instagram, Details: performerJSON.Details, 
HairColor: performerJSON.HairColor, Favorite: performerJSON.Favorite, @@ -211,6 +208,25 @@ func performerJSONToPerformer(performerJSON jsonschema.Performer) models.Perform StashIDs: models.NewRelatedStashIDs(performerJSON.StashIDs), } + if len(performerJSON.URLs) > 0 { + newPerformer.URLs = models.NewRelatedStrings(performerJSON.URLs) + } else { + urls := []string{} + if performerJSON.URL != "" { + urls = append(urls, performerJSON.URL) + } + if performerJSON.Twitter != "" { + urls = append(urls, performerJSON.Twitter) + } + if performerJSON.Instagram != "" { + urls = append(urls, performerJSON.Instagram) + } + + if len(urls) > 0 { + newPerformer.URLs = models.NewRelatedStrings(urls) + } + } + if performerJSON.Gender != "" { v := models.GenderEnum(performerJSON.Gender) newPerformer.Gender = &v diff --git a/pkg/performer/url.go b/pkg/performer/url.go new file mode 100644 index 00000000000..4b52adad580 --- /dev/null +++ b/pkg/performer/url.go @@ -0,0 +1,18 @@ +package performer + +import ( + "regexp" +) + +var ( + twitterURLRE = regexp.MustCompile(`^https?:\/\/(?:www\.)?twitter\.com\/`) + instagramURLRE = regexp.MustCompile(`^https?:\/\/(?:www\.)?instagram\.com\/`) +) + +func IsTwitterURL(url string) bool { + return twitterURLRE.MatchString(url) +} + +func IsInstagramURL(url string) bool { + return instagramURLRE.MatchString(url) +} diff --git a/pkg/performer/validate.go b/pkg/performer/validate.go index 0106490cf62..68f7a8ef535 100644 --- a/pkg/performer/validate.go +++ b/pkg/performer/validate.go @@ -102,11 +102,15 @@ func validateName(ctx context.Context, name string, disambig string, existingID }, } + modifier := models.CriterionModifierIsNull + if disambig != "" { - performerFilter.Disambiguation = &models.StringCriterionInput{ - Value: disambig, - Modifier: models.CriterionModifierEquals, - } + modifier = models.CriterionModifierEquals + } + + performerFilter.Disambiguation = &models.StringCriterionInput{ + Value: disambig, + Modifier: 
modifier, } if existingID == nil { diff --git a/pkg/performer/validate_test.go b/pkg/performer/validate_test.go index 778459f1751..33f4b1cec58 100644 --- a/pkg/performer/validate_test.go +++ b/pkg/performer/validate_test.go @@ -15,6 +15,9 @@ func nameFilter(n string) *models.PerformerFilterType { Value: n, Modifier: models.CriterionModifierEquals, }, + Disambiguation: &models.StringCriterionInput{ + Modifier: models.CriterionModifierIsNull, + }, } } @@ -41,13 +44,6 @@ func TestValidateName(t *testing.T) { newName = "new name" newDisambig = "new disambiguation" ) - // existing1 := models.Performer{ - // Name: name1, - // } - // existing2 := models.Performer{ - // Name: name2, - // Disambiguation: disambig, - // } pp := 1 findFilter := &models.FindFilterType{ diff --git a/pkg/pkg/pkg.go b/pkg/pkg/pkg.go index 7c2e734ef4b..51c35c3d763 100644 --- a/pkg/pkg/pkg.go +++ b/pkg/pkg/pkg.go @@ -1,3 +1,4 @@ +// Package pkg provides interfaces to interact with the package system used for plugins and scrapers. 
package pkg import ( diff --git a/pkg/plugin/examples/react-component/src/testReact.scss b/pkg/plugin/examples/react-component/src/testReact.scss index 2ca6631b876..695473795a8 100644 --- a/pkg/plugin/examples/react-component/src/testReact.scss +++ b/pkg/plugin/examples/react-component/src/testReact.scss @@ -33,4 +33,15 @@ .scene-performer-popover .image-thumbnail { margin: 1em; } - \ No newline at end of file + +.example-react-component-custom-overlay { + display: block; + font-weight: 900; + height: 100%; + opacity: 0.25; + position: absolute; + text-align: center; + top: 0; + width: 100%; + z-index: 8; +} \ No newline at end of file diff --git a/pkg/plugin/examples/react-component/src/testReact.tsx b/pkg/plugin/examples/react-component/src/testReact.tsx index 127920eff9b..c29f9c3dd16 100644 --- a/pkg/plugin/examples/react-component/src/testReact.tsx +++ b/pkg/plugin/examples/react-component/src/testReact.tsx @@ -132,10 +132,18 @@ interface IPluginApi { ); } + function Overlays() { + return Custom overlay; + } + PluginApi.patch.instead("SceneCard.Details", function (props: any, _: any, original: any) { return ; }); + PluginApi.patch.instead("SceneCard.Overlays", function (props: any, _: any, original: (props: any) => any) { + return <>{original({...props})}; + }); + const TestPage: React.FC = () => { const componentsLoading = PluginApi.hooks.useLoadComponents([PluginApi.loadableComponents.SceneCard]); diff --git a/pkg/plugin/hook/hooks.go b/pkg/plugin/hook/hooks.go index 1b7d93be4d6..a8235b1834d 100644 --- a/pkg/plugin/hook/hooks.go +++ b/pkg/plugin/hook/hooks.go @@ -26,10 +26,16 @@ const ( GalleryChapterUpdatePost TriggerEnum = "GalleryChapter.Update.Post" GalleryChapterDestroyPost TriggerEnum = "GalleryChapter.Destroy.Post" + // deprecated - use Group hooks instead + // for now, both movie and group hooks will be executed MovieCreatePost TriggerEnum = "Movie.Create.Post" MovieUpdatePost TriggerEnum = "Movie.Update.Post" MovieDestroyPost TriggerEnum = 
"Movie.Destroy.Post" + GroupCreatePost TriggerEnum = "Group.Create.Post" + GroupUpdatePost TriggerEnum = "Group.Update.Post" + GroupDestroyPost TriggerEnum = "Group.Destroy.Post" + PerformerCreatePost TriggerEnum = "Performer.Create.Post" PerformerUpdatePost TriggerEnum = "Performer.Update.Post" PerformerDestroyPost TriggerEnum = "Performer.Destroy.Post" diff --git a/pkg/plugin/raw.go b/pkg/plugin/raw.go index 6b78451effe..3ed33f37ba0 100644 --- a/pkg/plugin/raw.go +++ b/pkg/plugin/raw.go @@ -76,7 +76,7 @@ func (t *rawPluginTask) Start() error { if err != nil { logger.Warnf("error marshalling raw command input") } - if k, err := io.WriteString(stdin, string(inBytes)); err != nil { + if k, err := stdin.Write(inBytes); err != nil { logger.Warnf("error writing input to plugins stdin (wrote %v bytes out of %v): %v", k, len(string(inBytes)), err) } }() diff --git a/pkg/python/exec.go b/pkg/python/exec.go index 68fd18c88f9..09863529460 100644 --- a/pkg/python/exec.go +++ b/pkg/python/exec.go @@ -1,3 +1,4 @@ +// Package python provides utilities for working with the python executable. package python import ( diff --git a/pkg/scene/export.go b/pkg/scene/export.go index 6c2895c0844..5733c3be5d9 100644 --- a/pkg/scene/export.go +++ b/pkg/scene/export.go @@ -167,39 +167,39 @@ func GetDependentTagIDs(ctx context.Context, tags TagFinder, markerReader models return ret, nil } -// GetSceneMoviesJSON returns a slice of SceneMovie JSON representation objects -// corresponding to the provided scene's scene movie relationships. -func GetSceneMoviesJSON(ctx context.Context, movieReader models.MovieGetter, scene *models.Scene) ([]jsonschema.SceneMovie, error) { - sceneMovies := scene.Movies.List() - - var results []jsonschema.SceneMovie - for _, sceneMovie := range sceneMovies { - movie, err := movieReader.Find(ctx, sceneMovie.MovieID) +// GetSceneGroupsJSON returns a slice of SceneGroup JSON representation objects +// corresponding to the provided scene's scene group relationships. 
+func GetSceneGroupsJSON(ctx context.Context, groupReader models.GroupGetter, scene *models.Scene) ([]jsonschema.SceneGroup, error) { + sceneGroups := scene.Groups.List() + + var results []jsonschema.SceneGroup + for _, sceneGroup := range sceneGroups { + group, err := groupReader.Find(ctx, sceneGroup.GroupID) if err != nil { - return nil, fmt.Errorf("error getting movie: %v", err) + return nil, fmt.Errorf("error getting group: %v", err) } - if movie != nil { - sceneMovieJSON := jsonschema.SceneMovie{ - MovieName: movie.Name, + if group != nil { + sceneGroupJSON := jsonschema.SceneGroup{ + GroupName: group.Name, } - if sceneMovie.SceneIndex != nil { - sceneMovieJSON.SceneIndex = *sceneMovie.SceneIndex + if sceneGroup.SceneIndex != nil { + sceneGroupJSON.SceneIndex = *sceneGroup.SceneIndex } - results = append(results, sceneMovieJSON) + results = append(results, sceneGroupJSON) } } return results, nil } -// GetDependentMovieIDs returns a slice of movie IDs that this scene references. -func GetDependentMovieIDs(ctx context.Context, scene *models.Scene) ([]int, error) { +// GetDependentGroupIDs returns a slice of group IDs that this scene references. 
+func GetDependentGroupIDs(ctx context.Context, scene *models.Scene) ([]int, error) { var ret []int - m := scene.Movies.List() + m := scene.Groups.List() for _, mm := range m { - ret = append(ret, mm.MovieID) + ret = append(ret, mm.GroupID) } return ret, nil diff --git a/pkg/scene/export_test.go b/pkg/scene/export_test.go index 89fe3dfc62a..cde421bd80e 100644 --- a/pkg/scene/export_test.go +++ b/pkg/scene/export_test.go @@ -26,8 +26,8 @@ const ( noTagsID = 11 errTagsID = 12 - noMoviesID = 13 - errFindMovieID = 15 + noGroupsID = 13 + errFindGroupID = 15 noMarkersID = 16 errMarkersID = 17 @@ -49,15 +49,15 @@ var ( studioName = "studioName" // galleryChecksum = "galleryChecksum" - validMovie1 = 1 - validMovie2 = 2 - invalidMovie = 3 + validGroup1 = 1 + validGroup2 = 2 + invalidGroup = 3 - movie1Name = "movie1Name" - movie2Name = "movie2Name" + group1Name = "group1Name" + group2Name = "group2Name" - movie1Scene = 1 - movie2Scene = 2 + group1Scene = 1 + group2Scene = 2 ) var names = []string{ @@ -330,82 +330,82 @@ func TestGetTagNames(t *testing.T) { db.AssertExpectations(t) } -type sceneMoviesTestScenario struct { +type sceneGroupsTestScenario struct { input models.Scene - expected []jsonschema.SceneMovie + expected []jsonschema.SceneGroup err bool } -var validMovies = models.NewRelatedMovies([]models.MoviesScenes{ +var validGroups = models.NewRelatedGroups([]models.GroupsScenes{ { - MovieID: validMovie1, - SceneIndex: &movie1Scene, + GroupID: validGroup1, + SceneIndex: &group1Scene, }, { - MovieID: validMovie2, - SceneIndex: &movie2Scene, + GroupID: validGroup2, + SceneIndex: &group2Scene, }, }) -var invalidMovies = models.NewRelatedMovies([]models.MoviesScenes{ +var invalidGroups = models.NewRelatedGroups([]models.GroupsScenes{ { - MovieID: invalidMovie, - SceneIndex: &movie1Scene, + GroupID: invalidGroup, + SceneIndex: &group1Scene, }, }) -var getSceneMoviesJSONScenarios = []sceneMoviesTestScenario{ +var getSceneGroupsJSONScenarios = []sceneGroupsTestScenario{ { 
models.Scene{ ID: sceneID, - Movies: validMovies, + Groups: validGroups, }, - []jsonschema.SceneMovie{ + []jsonschema.SceneGroup{ { - MovieName: movie1Name, - SceneIndex: movie1Scene, + GroupName: group1Name, + SceneIndex: group1Scene, }, { - MovieName: movie2Name, - SceneIndex: movie2Scene, + GroupName: group2Name, + SceneIndex: group2Scene, }, }, false, }, { models.Scene{ - ID: noMoviesID, - Movies: models.NewRelatedMovies([]models.MoviesScenes{}), + ID: noGroupsID, + Groups: models.NewRelatedGroups([]models.GroupsScenes{}), }, nil, false, }, { models.Scene{ - ID: errFindMovieID, - Movies: invalidMovies, + ID: errFindGroupID, + Groups: invalidGroups, }, nil, true, }, } -func TestGetSceneMoviesJSON(t *testing.T) { +func TestGetSceneGroupsJSON(t *testing.T) { db := mocks.NewDatabase() - movieErr := errors.New("error getting movie") + groupErr := errors.New("error getting group") - db.Movie.On("Find", testCtx, validMovie1).Return(&models.Movie{ - Name: movie1Name, + db.Group.On("Find", testCtx, validGroup1).Return(&models.Group{ + Name: group1Name, }, nil).Once() - db.Movie.On("Find", testCtx, validMovie2).Return(&models.Movie{ - Name: movie2Name, + db.Group.On("Find", testCtx, validGroup2).Return(&models.Group{ + Name: group2Name, }, nil).Once() - db.Movie.On("Find", testCtx, invalidMovie).Return(nil, movieErr).Once() + db.Group.On("Find", testCtx, invalidGroup).Return(nil, groupErr).Once() - for i, s := range getSceneMoviesJSONScenarios { + for i, s := range getSceneGroupsJSONScenarios { scene := s.input - json, err := GetSceneMoviesJSON(testCtx, db.Movie, &scene) + json, err := GetSceneGroupsJSON(testCtx, db.Group, &scene) switch { case !s.err && err != nil: diff --git a/pkg/scene/filename_parser.go b/pkg/scene/filename_parser.go index 0426696def5..b8dff89d7e5 100644 --- a/pkg/scene/filename_parser.go +++ b/pkg/scene/filename_parser.go @@ -204,7 +204,7 @@ type sceneHolder struct { mm string dd string performers []string - movies []string + groups []string studio 
string tags []string } @@ -340,7 +340,7 @@ func (h *sceneHolder) setField(field parserField, value interface{}) { case "studio": h.studio = value.(string) case "movie": - h.movies = append(h.movies, value.(string)) + h.groups = append(h.groups, value.(string)) case "tag": h.tags = append(h.tags, value.(string)) case "yyyy": @@ -413,7 +413,7 @@ type FilenameParser struct { repository FilenameParserRepository performerCache map[string]*models.Performer studioCache map[string]*models.Studio - movieCache map[string]*models.Movie + groupCache map[string]*models.Group tagCache map[string]*models.Tag } @@ -427,7 +427,7 @@ func NewFilenameParser(filter *models.FindFilterType, config models.SceneParserI p.performerCache = make(map[string]*models.Performer) p.studioCache = make(map[string]*models.Studio) - p.movieCache = make(map[string]*models.Movie) + p.groupCache = make(map[string]*models.Group) p.tagCache = make(map[string]*models.Tag) p.initWhiteSpaceRegex() @@ -455,7 +455,7 @@ type FilenameParserRepository struct { Scene models.SceneQueryer Performer PerformerNamesFinder Studio models.StudioQueryer - Movie MovieNameFinder + Group GroupNameFinder Tag models.TagQueryer } @@ -464,7 +464,7 @@ func NewFilenameParserRepository(repo models.Repository) FilenameParserRepositor Scene: repo.Scene, Performer: repo.Performer, Studio: repo.Studio, - Movie: repo.Movie, + Group: repo.Group, Tag: repo.Tag, } } @@ -578,23 +578,23 @@ func (p *FilenameParser) queryStudio(ctx context.Context, qb models.StudioQuerye return ret } -type MovieNameFinder interface { - FindByName(ctx context.Context, name string, nocase bool) (*models.Movie, error) +type GroupNameFinder interface { + FindByName(ctx context.Context, name string, nocase bool) (*models.Group, error) } -func (p *FilenameParser) queryMovie(ctx context.Context, qb MovieNameFinder, movieName string) *models.Movie { - // massage the movie name - movieName = delimiterRE.ReplaceAllString(movieName, " ") +func (p *FilenameParser) 
queryGroup(ctx context.Context, qb GroupNameFinder, groupName string) *models.Group { + // massage the group name + groupName = delimiterRE.ReplaceAllString(groupName, " ") // check cache first - if ret, found := p.movieCache[movieName]; found { + if ret, found := p.groupCache[groupName]; found { return ret } - ret, _ := qb.FindByName(ctx, movieName, true) + ret, _ := qb.FindByName(ctx, groupName, true) // add result to cache - p.movieCache[movieName] = ret + p.groupCache[groupName] = ret return ret } @@ -665,18 +665,18 @@ func (p *FilenameParser) setStudio(ctx context.Context, qb models.StudioQueryer, } } -func (p *FilenameParser) setMovies(ctx context.Context, qb MovieNameFinder, h sceneHolder, result *models.SceneParserResult) { - // query for each movie - moviesSet := make(map[int]bool) - for _, movieName := range h.movies { - if movieName != "" { - movie := p.queryMovie(ctx, qb, movieName) - if movie != nil { - if _, found := moviesSet[movie.ID]; !found { +func (p *FilenameParser) setGroups(ctx context.Context, qb GroupNameFinder, h sceneHolder, result *models.SceneParserResult) { + // query for each group + groupsSet := make(map[int]bool) + for _, groupName := range h.groups { + if groupName != "" { + group := p.queryGroup(ctx, qb, groupName) + if group != nil { + if _, found := groupsSet[group.ID]; !found { result.Movies = append(result.Movies, &models.SceneMovieID{ - MovieID: strconv.Itoa(movie.ID), + MovieID: strconv.Itoa(group.ID), }) - moviesSet[movie.ID] = true + groupsSet[group.ID] = true } } } @@ -714,7 +714,7 @@ func (p *FilenameParser) setParserResult(ctx context.Context, h sceneHolder, res } p.setStudio(ctx, r.Studio, h, result) - if len(h.movies) > 0 { - p.setMovies(ctx, r.Movie, h, result) + if len(h.groups) > 0 { + p.setGroups(ctx, r.Group, h, result) } } diff --git a/pkg/scene/generate/generator.go b/pkg/scene/generate/generator.go index 70f6857ea5d..7e5705679d1 100644 --- a/pkg/scene/generate/generator.go +++ b/pkg/scene/generate/generator.go 
@@ -1,3 +1,4 @@ +// Package generate provides functions to generate media assets from scenes. package generate import ( diff --git a/pkg/scene/import.go b/pkg/scene/import.go index fc2db4dea94..b36e1bd68ab 100644 --- a/pkg/scene/import.go +++ b/pkg/scene/import.go @@ -26,7 +26,7 @@ type Importer struct { StudioWriter models.StudioFinderCreator GalleryFinder models.GalleryFinder PerformerWriter models.PerformerFinderCreator - MovieWriter models.MovieFinderCreator + GroupWriter models.GroupFinderCreator TagWriter models.TagFinderCreator Input jsonschema.Scene MissingRefBehaviour models.ImportMissingRefEnum @@ -62,7 +62,7 @@ func (i *Importer) PreImport(ctx context.Context) error { return err } - if err := i.populateMovies(ctx); err != nil { + if err := i.populateGroups(ctx); err != nil { return err } @@ -89,7 +89,7 @@ func (i *Importer) sceneJSONToScene(sceneJSON jsonschema.Scene) models.Scene { PerformerIDs: models.NewRelatedIDs([]int{}), TagIDs: models.NewRelatedIDs([]int{}), GalleryIDs: models.NewRelatedIDs([]int{}), - Movies: models.NewRelatedMovies([]models.MoviesScenes{}), + Groups: models.NewRelatedGroups([]models.GroupsScenes{}), StashIDs: models.NewRelatedStashIDs(sceneJSON.StashIDs), } @@ -150,7 +150,7 @@ func (i *Importer) populateViewHistory() { } func (i *Importer) populateOHistory() { - i.viewHistory = getHistory( + i.oHistory = getHistory( i.Input.OHistory, i.Input.OCounter, i.Input.CreatedAt, // no last o count date @@ -335,24 +335,24 @@ func (i *Importer) createPerformers(ctx context.Context, names []string) ([]*mod return ret, nil } -func (i *Importer) populateMovies(ctx context.Context) error { - if len(i.Input.Movies) > 0 { - for _, inputMovie := range i.Input.Movies { - movie, err := i.MovieWriter.FindByName(ctx, inputMovie.MovieName, false) +func (i *Importer) populateGroups(ctx context.Context) error { + if len(i.Input.Groups) > 0 { + for _, inputGroup := range i.Input.Groups { + group, err := i.GroupWriter.FindByName(ctx, inputGroup.GroupName, 
false) if err != nil { - return fmt.Errorf("error finding scene movie: %v", err) + return fmt.Errorf("error finding scene group: %v", err) } - var movieID int - if movie == nil { + var groupID int + if group == nil { if i.MissingRefBehaviour == models.ImportMissingRefEnumFail { - return fmt.Errorf("scene movie [%s] not found", inputMovie.MovieName) + return fmt.Errorf("scene group [%s] not found", inputGroup.GroupName) } if i.MissingRefBehaviour == models.ImportMissingRefEnumCreate { - movieID, err = i.createMovie(ctx, inputMovie.MovieName) + groupID, err = i.createGroup(ctx, inputGroup.GroupName) if err != nil { - return fmt.Errorf("error creating scene movie: %v", err) + return fmt.Errorf("error creating scene group: %v", err) } } @@ -361,35 +361,35 @@ func (i *Importer) populateMovies(ctx context.Context) error { continue } } else { - movieID = movie.ID + groupID = group.ID } - toAdd := models.MoviesScenes{ - MovieID: movieID, + toAdd := models.GroupsScenes{ + GroupID: groupID, } - if inputMovie.SceneIndex != 0 { - index := inputMovie.SceneIndex + if inputGroup.SceneIndex != 0 { + index := inputGroup.SceneIndex toAdd.SceneIndex = &index } - i.scene.Movies.Add(toAdd) + i.scene.Groups.Add(toAdd) } } return nil } -func (i *Importer) createMovie(ctx context.Context, name string) (int, error) { - newMovie := models.NewMovie() - newMovie.Name = name +func (i *Importer) createGroup(ctx context.Context, name string) (int, error) { + newGroup := models.NewGroup() + newGroup.Name = name - err := i.MovieWriter.Create(ctx, &newMovie) + err := i.GroupWriter.Create(ctx, &newGroup) if err != nil { return 0, err } - return newMovie.ID, nil + return newGroup.ID, nil } func (i *Importer) populateTags(ctx context.Context) error { diff --git a/pkg/scene/import_test.go b/pkg/scene/import_test.go index 26180785627..0e37dce16db 100644 --- a/pkg/scene/import_test.go +++ b/pkg/scene/import_test.go @@ -4,10 +4,13 @@ import ( "context" "errors" "testing" + "time" 
"github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/models/json" "github.com/stashapp/stash/pkg/models/jsonschema" "github.com/stashapp/stash/pkg/models/mocks" + "github.com/stashapp/stash/pkg/sliceutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" ) @@ -17,7 +20,7 @@ const invalidImage = "aW1hZ2VCeXRlcw&&" var ( existingStudioID = 101 existingPerformerID = 103 - existingMovieID = 104 + existingGroupID = 104 existingTagID = 105 existingStudioName = "existingStudioName" @@ -28,9 +31,9 @@ var ( existingPerformerErr = "existingPerformerErr" missingPerformerName = "missingPerformerName" - existingMovieName = "existingMovieName" - existingMovieErr = "existingMovieErr" - missingMovieName = "missingMovieName" + existingGroupName = "existingGroupName" + existingGroupErr = "existingGroupErr" + missingGroupName = "missingGroupName" existingTagName = "existingTagName" existingTagErr = "existingTagErr" @@ -40,6 +43,151 @@ var ( var testCtx = context.Background() func TestImporterPreImport(t *testing.T) { + var ( + title = "title" + code = "code" + details = "details" + director = "director" + endpoint1 = "endpoint1" + stashID1 = "stashID1" + endpoint2 = "endpoint2" + stashID2 = "stashID2" + url1 = "url1" + url2 = "url2" + rating = 3 + organized = true + + createdAt = time.Now().Add(-time.Hour) + updatedAt = time.Now().Add(-time.Minute) + + resumeTime = 1.234 + playDuration = 2.345 + ) + tests := []struct { + name string + input jsonschema.Scene + output models.Scene + }{ + { + "basic", + jsonschema.Scene{ + Title: title, + Code: code, + Details: details, + Director: director, + StashIDs: []models.StashID{ + {Endpoint: endpoint1, StashID: stashID1}, + {Endpoint: endpoint2, StashID: stashID2}, + }, + URLs: []string{url1, url2}, + Rating: rating, + Organized: organized, + CreatedAt: json.JSONTime{Time: createdAt}, + UpdatedAt: json.JSONTime{Time: updatedAt}, + ResumeTime: resumeTime, + PlayDuration: playDuration, + }, + models.Scene{ 
+ Title: title, + Code: code, + Details: details, + Director: director, + StashIDs: models.NewRelatedStashIDs([]models.StashID{ + {Endpoint: endpoint1, StashID: stashID1}, + {Endpoint: endpoint2, StashID: stashID2}, + }), + URLs: models.NewRelatedStrings([]string{url1, url2}), + Rating: &rating, + Organized: organized, + CreatedAt: createdAt.Truncate(0), + UpdatedAt: updatedAt.Truncate(0), + ResumeTime: resumeTime, + PlayDuration: playDuration, + + Files: models.NewRelatedVideoFiles([]*models.VideoFile{}), + GalleryIDs: models.NewRelatedIDs([]int{}), + TagIDs: models.NewRelatedIDs([]int{}), + PerformerIDs: models.NewRelatedIDs([]int{}), + Groups: models.NewRelatedGroups([]models.GroupsScenes{}), + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + i := Importer{ + Input: tt.input, + } + + if err := i.PreImport(testCtx); err != nil { + t.Errorf("PreImport() error = %v", err) + return + } + + assert.Equal(t, tt.output, i.scene) + }) + } +} + +func truncateTimes(t []time.Time) []time.Time { + return sliceutil.Map(t, func(t time.Time) time.Time { return t.Truncate(0) }) +} + +func TestImporterPreImportHistory(t *testing.T) { + var ( + playTime1 = time.Now().Add(-time.Hour * 2) + playTime2 = time.Now().Add(-time.Minute * 2) + oTime1 = time.Now().Add(-time.Hour * 3) + oTime2 = time.Now().Add(-time.Minute * 3) + ) + tests := []struct { + name string + input jsonschema.Scene + expectedPlayHistory []time.Time + expectedOHistory []time.Time + }{ + { + "basic", + jsonschema.Scene{ + PlayHistory: []json.JSONTime{ + {Time: playTime1}, + {Time: playTime2}, + }, + OHistory: []json.JSONTime{ + {Time: oTime1}, + {Time: oTime2}, + }, + }, + []time.Time{playTime1, playTime2}, + []time.Time{oTime1, oTime2}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + i := Importer{ + Input: tt.input, + } + + if err := i.PreImport(testCtx); err != nil { + t.Errorf("PreImport() error = %v", err) + return + } + + // convert histories to 
unix timestamps for comparison + eph := truncateTimes(tt.expectedPlayHistory) + vh := truncateTimes(i.viewHistory) + + eoh := truncateTimes(tt.expectedOHistory) + oh := truncateTimes(i.oHistory) + + assert.Equal(t, eph, vh, "view history mismatch") + assert.Equal(t, eoh, oh, "o history mismatch") + }) + } +} + +func TestImporterPreImportCoverImage(t *testing.T) { i := Importer{ Input: jsonschema.Scene{ Cover: invalidImage, @@ -221,58 +369,58 @@ func TestImporterPreImportWithMissingPerformerCreateErr(t *testing.T) { db.AssertExpectations(t) } -func TestImporterPreImportWithMovie(t *testing.T) { +func TestImporterPreImportWithGroup(t *testing.T) { db := mocks.NewDatabase() i := Importer{ - MovieWriter: db.Movie, + GroupWriter: db.Group, MissingRefBehaviour: models.ImportMissingRefEnumFail, Input: jsonschema.Scene{ - Movies: []jsonschema.SceneMovie{ + Groups: []jsonschema.SceneGroup{ { - MovieName: existingMovieName, + GroupName: existingGroupName, SceneIndex: 1, }, }, }, } - db.Movie.On("FindByName", testCtx, existingMovieName, false).Return(&models.Movie{ - ID: existingMovieID, - Name: existingMovieName, + db.Group.On("FindByName", testCtx, existingGroupName, false).Return(&models.Group{ + ID: existingGroupID, + Name: existingGroupName, }, nil).Once() - db.Movie.On("FindByName", testCtx, existingMovieErr, false).Return(nil, errors.New("FindByName error")).Once() + db.Group.On("FindByName", testCtx, existingGroupErr, false).Return(nil, errors.New("FindByName error")).Once() err := i.PreImport(testCtx) assert.Nil(t, err) - assert.Equal(t, existingMovieID, i.scene.Movies.List()[0].MovieID) + assert.Equal(t, existingGroupID, i.scene.Groups.List()[0].GroupID) - i.Input.Movies[0].MovieName = existingMovieErr + i.Input.Groups[0].GroupName = existingGroupErr err = i.PreImport(testCtx) assert.NotNil(t, err) db.AssertExpectations(t) } -func TestImporterPreImportWithMissingMovie(t *testing.T) { +func TestImporterPreImportWithMissingGroup(t *testing.T) { db := 
mocks.NewDatabase() i := Importer{ - MovieWriter: db.Movie, + GroupWriter: db.Group, Input: jsonschema.Scene{ - Movies: []jsonschema.SceneMovie{ + Groups: []jsonschema.SceneGroup{ { - MovieName: missingMovieName, + GroupName: missingGroupName, }, }, }, MissingRefBehaviour: models.ImportMissingRefEnumFail, } - db.Movie.On("FindByName", testCtx, missingMovieName, false).Return(nil, nil).Times(3) - db.Movie.On("Create", testCtx, mock.AnythingOfType("*models.Movie")).Run(func(args mock.Arguments) { - m := args.Get(1).(*models.Movie) - m.ID = existingMovieID + db.Group.On("FindByName", testCtx, missingGroupName, false).Return(nil, nil).Times(3) + db.Group.On("Create", testCtx, mock.AnythingOfType("*models.Group")).Run(func(args mock.Arguments) { + m := args.Get(1).(*models.Group) + m.ID = existingGroupID }).Return(nil) err := i.PreImport(testCtx) @@ -285,28 +433,28 @@ func TestImporterPreImportWithMissingMovie(t *testing.T) { i.MissingRefBehaviour = models.ImportMissingRefEnumCreate err = i.PreImport(testCtx) assert.Nil(t, err) - assert.Equal(t, existingMovieID, i.scene.Movies.List()[0].MovieID) + assert.Equal(t, existingGroupID, i.scene.Groups.List()[0].GroupID) db.AssertExpectations(t) } -func TestImporterPreImportWithMissingMovieCreateErr(t *testing.T) { +func TestImporterPreImportWithMissingGroupCreateErr(t *testing.T) { db := mocks.NewDatabase() i := Importer{ - MovieWriter: db.Movie, + GroupWriter: db.Group, Input: jsonschema.Scene{ - Movies: []jsonschema.SceneMovie{ + Groups: []jsonschema.SceneGroup{ { - MovieName: missingMovieName, + GroupName: missingGroupName, }, }, }, MissingRefBehaviour: models.ImportMissingRefEnumCreate, } - db.Movie.On("FindByName", testCtx, missingMovieName, false).Return(nil, nil).Once() - db.Movie.On("Create", testCtx, mock.AnythingOfType("*models.Movie")).Return(errors.New("Create error")) + db.Group.On("FindByName", testCtx, missingGroupName, false).Return(nil, nil).Once() + db.Group.On("Create", testCtx, 
mock.AnythingOfType("*models.Group")).Return(errors.New("Create error")) err := i.PreImport(testCtx) assert.NotNil(t, err) diff --git a/pkg/scene/query.go b/pkg/scene/query.go index a8b1993a6a0..c640266f9ef 100644 --- a/pkg/scene/query.go +++ b/pkg/scene/query.go @@ -144,3 +144,15 @@ func CountByTagID(ctx context.Context, r models.SceneQueryer, id int, depth *int return r.QueryCount(ctx, filter, nil) } + +func CountByGroupID(ctx context.Context, r models.SceneQueryer, id int, depth *int) (int, error) { + filter := &models.SceneFilterType{ + Groups: &models.HierarchicalMultiCriterionInput{ + Value: []string{strconv.Itoa(id)}, + Modifier: models.CriterionModifierIncludes, + Depth: depth, + }, + } + + return r.QueryCount(ctx, filter, nil) +} diff --git a/pkg/scene/service.go b/pkg/scene/service.go index 05fa9f532eb..f5a117309c7 100644 --- a/pkg/scene/service.go +++ b/pkg/scene/service.go @@ -1,3 +1,5 @@ +// Package scene provides the application logic for scene functionality. +// Most functionality is provided by [Service]. 
package scene import ( diff --git a/pkg/scraper/cache.go b/pkg/scraper/cache.go index 498a4ce3996..1799892f91a 100644 --- a/pkg/scraper/cache.go +++ b/pkg/scraper/cache.go @@ -53,6 +53,7 @@ func isCDPPathWS(c GlobalConfig) bool { type SceneFinder interface { models.SceneGetter models.URLLoader + models.VideoFileLoader } type PerformerFinder interface { @@ -83,7 +84,7 @@ type Repository struct { GalleryFinder GalleryFinder TagFinder TagFinder PerformerFinder PerformerFinder - MovieFinder match.MovieNamesFinder + GroupFinder match.GroupNamesFinder StudioFinder StudioFinder } @@ -94,7 +95,7 @@ func NewRepository(repo models.Repository) Repository { GalleryFinder: repo.Gallery, TagFinder: repo.Tag, PerformerFinder: repo.Performer, - MovieFinder: repo.Movie, + GroupFinder: repo.Group, StudioFinder: repo.Studio, } } @@ -380,7 +381,15 @@ func (c Cache) getScene(ctx context.Context, sceneID int) (*models.Scene, error) return fmt.Errorf("scene with id %d not found", sceneID) } - return ret.LoadURLs(ctx, qb) + if err := ret.LoadURLs(ctx, qb); err != nil { + return err + } + + if err := ret.LoadFiles(ctx, qb); err != nil { + return err + } + + return nil }); err != nil { return nil, err } @@ -403,12 +412,15 @@ func (c Cache) getGallery(ctx context.Context, galleryID int) (*models.Gallery, return fmt.Errorf("gallery with id %d not found", galleryID) } - err = ret.LoadFiles(ctx, qb) - if err != nil { + if err := ret.LoadURLs(ctx, qb); err != nil { + return err + } + + if err := ret.LoadFiles(ctx, qb); err != nil { return err } - return ret.LoadURLs(ctx, qb) + return nil }); err != nil { return nil, err } diff --git a/pkg/scraper/config.go b/pkg/scraper/config.go index 3a0aadf51e8..9c51b4bba95 100644 --- a/pkg/scraper/config.go +++ b/pkg/scraper/config.go @@ -45,8 +45,9 @@ type config struct { // Configuration for querying a gallery by a URL GalleryByURL []*scrapeByURLConfig `yaml:"galleryByURL"` - // Configuration for querying a movie by a URL + // Configuration for querying a 
movie by a URL - deprecated, use GroupByURL MovieByURL []*scrapeByURLConfig `yaml:"movieByURL"` + GroupByURL []*scrapeByURLConfig `yaml:"groupByURL"` // Scraper debugging options DebugOptions *scraperDebugOptions `yaml:"debug"` @@ -99,7 +100,11 @@ func (c config) validate() error { } } - for _, s := range c.MovieByURL { + if len(c.MovieByURL) > 0 && len(c.GroupByURL) > 0 { + return errors.New("movieByURL disallowed if groupByURL is present") + } + + for _, s := range append(c.MovieByURL, c.GroupByURL...) { if err := s.validate(); err != nil { return err } @@ -289,16 +294,17 @@ func (c config) spec() Scraper { ret.Gallery = &gallery } - movie := ScraperSpec{} - if len(c.MovieByURL) > 0 { - movie.SupportedScrapes = append(movie.SupportedScrapes, ScrapeTypeURL) - for _, v := range c.MovieByURL { - movie.Urls = append(movie.Urls, v.URL...) + group := ScraperSpec{} + if len(c.MovieByURL) > 0 || len(c.GroupByURL) > 0 { + group.SupportedScrapes = append(group.SupportedScrapes, ScrapeTypeURL) + for _, v := range append(c.MovieByURL, c.GroupByURL...) { + group.Urls = append(group.Urls, v.URL...) 
} } - if len(movie.SupportedScrapes) > 0 { - ret.Movie = &movie + if len(group.SupportedScrapes) > 0 { + ret.Movie = &group + ret.Group = &group } return ret @@ -312,8 +318,8 @@ func (c config) supports(ty ScrapeContentType) bool { return (c.SceneByName != nil && c.SceneByQueryFragment != nil) || c.SceneByFragment != nil || len(c.SceneByURL) > 0 case ScrapeContentTypeGallery: return c.GalleryByFragment != nil || len(c.GalleryByURL) > 0 - case ScrapeContentTypeMovie: - return len(c.MovieByURL) > 0 + case ScrapeContentTypeMovie, ScrapeContentTypeGroup: + return len(c.MovieByURL) > 0 || len(c.GroupByURL) > 0 } panic("Unhandled ScrapeContentType") @@ -339,7 +345,7 @@ func (c config) matchesURL(url string, ty ScrapeContentType) bool { return true } } - case ScrapeContentTypeMovie: + case ScrapeContentTypeMovie, ScrapeContentTypeGroup: for _, scraper := range c.MovieByURL { if scraper.matchesURL(url) { return true diff --git a/pkg/scraper/freeones.go b/pkg/scraper/freeones.go index 9a8eb4859c9..4b414541dd2 100644 --- a/pkg/scraper/freeones.go +++ b/pkg/scraper/freeones.go @@ -42,44 +42,48 @@ xPathScrapers: selector: //h1 postProcess: - replace: - - regex: \sBio\s*$ - with: "" + - regex: (.+)\sidentifies.+ + with: $1 URL: //link[@rel="alternate" and @hreflang="x-default"]/@href - Twitter: //a[not(starts-with(@href,'https://twitter.com/FreeOnes'))][contains(@href,'twitter.com/')]/@href - Instagram: //a[contains(@href,'instagram.com/')]/@href + Twitter: //form//a[contains(@href,'twitter.com/')]/@href + Instagram: //form//a[contains(@href,'instagram.com/')]/@href Birthdate: - selector: //span[contains(text(),'Born On')] + selector: //span[@data-test="link_span_dateOfBirth"]/text() postProcess: - - replace: - - regex: Born On - with: - parseDate: January 2, 2006 Ethnicity: - selector: //a[@data-test="link_ethnicity"]/span/text() + selector: //span[@data-test="link_span_ethnicity"] postProcess: - map: Asian: Asian Caucasian: White Black: Black Latin: Hispanic - Country: 
//a[@data-test="link-country"]/span/text() - EyeColor: //span[text()='Eye Color']/following-sibling::span/a + Country: + selector: //a[@data-test="link_placeOfBirth"][contains(@href, 'country')]/span/text() + postProcess: + - map: + United States: "USA" + EyeColor: //span[text()='Eye Color:']/following-sibling::span/a/span/text() Height: - selector: //span[text()='Height']/following-sibling::span/a + selector: //span[text()='Height:']/following-sibling::span/a postProcess: - replace: - - regex: \D+[\s\S]+ - with: "" + - regex: \scm + with: "" - map: Unknown: "" Measurements: - selector: //span[text()='Measurements']/following-sibling::span/span/a + selector: //span[(@data-test='link_span_bra') or (@data-test='link_span_waist') or (@data-test='link_span_hip')] concat: " - " postProcess: + - replace: + - regex: \sIn + with: "" - map: Unknown: "" FakeTits: - selector: //span[text()='Boobs']/following-sibling::span/a + selector: //span[text()='Boobs:']/following-sibling::span/a postProcess: - map: Unknown: "" @@ -88,14 +92,16 @@ xPathScrapers: CareerLength: selector: //div[contains(@class,'timeline-horizontal')]//p[@class='m-0'] concat: "-" - Aliases: //p[@data-test='p_aliases']/text() + Aliases: + selector: //span[@data-test='link_span_aliases']/text() + concat: ", " Tattoos: - selector: //span[text()='Tattoos']/following-sibling::span/span + selector: //span[text()='Tattoo locations:']/following-sibling::span postProcess: - map: Unknown: "" Piercings: - selector: //span[text()='Piercings']/following-sibling::span/span + selector: //span[text()='Piercing locations:']/following-sibling::span postProcess: - map: Unknown: "" @@ -103,7 +109,6 @@ xPathScrapers: selector: //div[contains(@class,'image-container')]//a/img/@src Gender: fixed: "Female" - Details: //div[@data-test="biography"] DeathDate: selector: //div[contains(text(),'Passed away on')] postProcess: @@ -111,15 +116,15 @@ xPathScrapers: - regex: Passed away on (.+) at the age of \d+ with: $1 - parseDate: January 
2, 2006 - HairColor: //span[text()='Hair Color']/following-sibling::span/a + HairColor: //span[@data-test="link_span_hair_color"] Weight: - selector: //span[text()='Weight']/following-sibling::span/a + selector: //span[@data-test="link_span_weight"] postProcess: - - replace: - - regex: \D+[\s\S]+ + - replace: + - regex: \skg with: "" -# Last updated April 13, 2021 +# Last Updated January 2, 2024 ` func getFreeonesScraper(globalConfig GlobalConfig) scraper { diff --git a/pkg/scraper/group.go b/pkg/scraper/group.go index bbf0a680adb..fff9beb2fe2 100644 --- a/pkg/scraper/group.go +++ b/pkg/scraper/group.go @@ -81,8 +81,8 @@ func loadUrlCandidates(c config, ty ScrapeContentType) []*scrapeByURLConfig { return c.PerformerByURL case ScrapeContentTypeScene: return c.SceneByURL - case ScrapeContentTypeMovie: - return c.MovieByURL + case ScrapeContentTypeMovie, ScrapeContentTypeGroup: + return append(c.MovieByURL, c.GroupByURL...) case ScrapeContentTypeGallery: return c.GalleryByURL } diff --git a/pkg/scraper/image.go b/pkg/scraper/image.go index 5757bc9b383..193ddc517b6 100644 --- a/pkg/scraper/image.go +++ b/pkg/scraper/image.go @@ -12,11 +12,19 @@ import ( ) func setPerformerImage(ctx context.Context, client *http.Client, p *models.ScrapedPerformer, globalConfig GlobalConfig) error { - if p.Image == nil || !strings.HasPrefix(*p.Image, "http") { + // backwards compatibility: we fetch the image if it's a URL and set it to the first image + // Image is deprecated, so only do this if Images is unset + if p.Image == nil || len(p.Images) > 0 { // nothing to do return nil } + // don't try to get the image if it doesn't appear to be a URL + if !strings.HasPrefix(*p.Image, "http") { + p.Images = []string{*p.Image} + return nil + } + img, err := getImage(ctx, *p.Image, client, globalConfig) if err != nil { return err @@ -80,6 +88,40 @@ func setMovieBackImage(ctx context.Context, client *http.Client, m *models.Scrap return nil } +func setGroupFrontImage(ctx context.Context, client 
*http.Client, m *models.ScrapedGroup, globalConfig GlobalConfig) error { + // don't try to get the image if it doesn't appear to be a URL + if m.FrontImage == nil || !strings.HasPrefix(*m.FrontImage, "http") { + // nothing to do + return nil + } + + img, err := getImage(ctx, *m.FrontImage, client, globalConfig) + if err != nil { + return err + } + + m.FrontImage = img + + return nil +} + +func setGroupBackImage(ctx context.Context, client *http.Client, m *models.ScrapedGroup, globalConfig GlobalConfig) error { + // don't try to get the image if it doesn't appear to be a URL + if m.BackImage == nil || !strings.HasPrefix(*m.BackImage, "http") { + // nothing to do + return nil + } + + img, err := getImage(ctx, *m.BackImage, client, globalConfig) + if err != nil { + return err + } + + m.BackImage = img + + return nil +} + func getImage(ctx context.Context, url string, client *http.Client, globalConfig GlobalConfig) (*string, error) { req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) if err != nil { diff --git a/pkg/scraper/json.go b/pkg/scraper/json.go index 1d6358a921f..929b5152e24 100644 --- a/pkg/scraper/json.go +++ b/pkg/scraper/json.go @@ -66,7 +66,7 @@ func (s *jsonScraper) loadURL(ctx context.Context, url string) (string, error) { return "", errors.New("not valid json") } - if err == nil && s.config.DebugOptions != nil && s.config.DebugOptions.PrintHTML { + if s.config.DebugOptions != nil && s.config.DebugOptions.PrintHTML { logger.Infof("loadURL (%s) response: \n%s", url, docStr) } @@ -81,15 +81,33 @@ func (s *jsonScraper) scrapeByURL(ctx context.Context, url string, ty ScrapeCont } q := s.getJsonQuery(doc) + // if these just return the return values from scraper.scrape* functions then + // it ends up returning ScrapedContent(nil) rather than nil switch ty { case ScrapeContentTypePerformer: - return scraper.scrapePerformer(ctx, q) + ret, err := scraper.scrapePerformer(ctx, q) + if err != nil || ret == nil { + return nil, err + } + return 
ret, nil case ScrapeContentTypeScene: - return scraper.scrapeScene(ctx, q) + ret, err := scraper.scrapeScene(ctx, q) + if err != nil || ret == nil { + return nil, err + } + return ret, nil case ScrapeContentTypeGallery: - return scraper.scrapeGallery(ctx, q) - case ScrapeContentTypeMovie: - return scraper.scrapeMovie(ctx, q) + ret, err := scraper.scrapeGallery(ctx, q) + if err != nil || ret == nil { + return nil, err + } + return ret, nil + case ScrapeContentTypeMovie, ScrapeContentTypeGroup: + ret, err := scraper.scrapeGroup(ctx, q) + if err != nil || ret == nil { + return nil, err + } + return ret, nil } return nil, ErrNotSupported diff --git a/pkg/scraper/mapped.go b/pkg/scraper/mapped.go index f8a09601503..a6b70565fd7 100644 --- a/pkg/scraper/mapped.go +++ b/pkg/scraper/mapped.go @@ -284,11 +284,13 @@ type mappedMovieScraperConfig struct { mappedConfig Studio mappedConfig `yaml:"Studio"` + Tags mappedConfig `yaml:"Tags"` } type _mappedMovieScraperConfig mappedMovieScraperConfig const ( mappedScraperConfigMovieStudio = "Studio" + mappedScraperConfigMovieTags = "Tags" ) func (s *mappedMovieScraperConfig) UnmarshalYAML(unmarshal func(interface{}) error) error { @@ -303,9 +305,11 @@ func (s *mappedMovieScraperConfig) UnmarshalYAML(unmarshal func(interface{}) err thisMap := make(map[string]interface{}) thisMap[mappedScraperConfigMovieStudio] = parentMap[mappedScraperConfigMovieStudio] - delete(parentMap, mappedScraperConfigMovieStudio) + thisMap[mappedScraperConfigMovieTags] = parentMap[mappedScraperConfigMovieTags] + delete(parentMap, mappedScraperConfigMovieTags) + // re-unmarshal the sub-fields yml, err := yaml.Marshal(thisMap) if err != nil { @@ -534,6 +538,21 @@ func (p *postProcessJavascript) Apply(ctx context.Context, value string, q mappe return value } + log := &javascript.Log{ + Logger: logger.Logger, + Prefix: "", + ProgressChan: make(chan float64), + } + + if err := log.AddToVM("log", vm); err != nil { + logger.Logger.Errorf("error adding log API: %w", 
err) + } + + util := &javascript.Util{} + if err := util.AddToVM("util", vm); err != nil { + logger.Logger.Errorf("error adding util API: %w", err) + } + script, err := javascript.CompileScript("", "(function() { "+string(*p)+"})()") if err != nil { logger.Warnf("javascript failed to compile: %v", err) @@ -1060,7 +1079,7 @@ func (s mappedScraper) scrapeGallery(ctx context.Context, q mappedQuery) (*Scrap return &ret, nil } -func (s mappedScraper) scrapeMovie(ctx context.Context, q mappedQuery) (*models.ScrapedMovie, error) { +func (s mappedScraper) scrapeGroup(ctx context.Context, q mappedQuery) (*models.ScrapedMovie, error) { var ret models.ScrapedMovie movieScraperConfig := s.Movie @@ -1071,6 +1090,7 @@ func (s mappedScraper) scrapeMovie(ctx context.Context, q mappedQuery) (*models. movieMap := movieScraperConfig.mappedConfig movieStudioMap := movieScraperConfig.Studio + movieTagsMap := movieScraperConfig.Tags results := movieMap.process(ctx, q, s.Common) @@ -1085,7 +1105,19 @@ func (s mappedScraper) scrapeMovie(ctx context.Context, q mappedQuery) (*models. 
} } - if len(results) == 0 && ret.Studio == nil { + // now apply the tags + if movieTagsMap != nil { + logger.Debug(`Processing movie tags:`) + tagResults := movieTagsMap.process(ctx, q, s.Common) + + for _, p := range tagResults { + tag := &models.ScrapedTag{} + p.apply(tag) + ret.Tags = append(ret.Tags, tag) + } + } + + if len(results) == 0 && ret.Studio == nil && len(ret.Tags) == 0 { return nil, nil } diff --git a/pkg/scraper/movie.go b/pkg/scraper/movie.go index 4416b6199cb..00c89ad9c45 100644 --- a/pkg/scraper/movie.go +++ b/pkg/scraper/movie.go @@ -1,12 +1,15 @@ package scraper type ScrapedMovieInput struct { - Name *string `json:"name"` - Aliases *string `json:"aliases"` - Duration *string `json:"duration"` - Date *string `json:"date"` - Rating *string `json:"rating"` - Director *string `json:"director"` - URL *string `json:"url"` - Synopsis *string `json:"synopsis"` + Name *string `json:"name"` + Aliases *string `json:"aliases"` + Duration *string `json:"duration"` + Date *string `json:"date"` + Rating *string `json:"rating"` + Director *string `json:"director"` + URLs []string `json:"urls"` + Synopsis *string `json:"synopsis"` + + // deprecated + URL *string `json:"url"` } diff --git a/pkg/scraper/performer.go b/pkg/scraper/performer.go index 26936882366..98e93176205 100644 --- a/pkg/scraper/performer.go +++ b/pkg/scraper/performer.go @@ -2,29 +2,30 @@ package scraper type ScrapedPerformerInput struct { // Set if performer matched - StoredID *string `json:"stored_id"` - Name *string `json:"name"` - Disambiguation *string `json:"disambiguation"` - Gender *string `json:"gender"` - URL *string `json:"url"` - Twitter *string `json:"twitter"` - Instagram *string `json:"instagram"` - Birthdate *string `json:"birthdate"` - Ethnicity *string `json:"ethnicity"` - Country *string `json:"country"` - EyeColor *string `json:"eye_color"` - Height *string `json:"height"` - Measurements *string `json:"measurements"` - FakeTits *string `json:"fake_tits"` - PenisLength 
*string `json:"penis_length"` - Circumcised *string `json:"circumcised"` - CareerLength *string `json:"career_length"` - Tattoos *string `json:"tattoos"` - Piercings *string `json:"piercings"` - Aliases *string `json:"aliases"` - Details *string `json:"details"` - DeathDate *string `json:"death_date"` - HairColor *string `json:"hair_color"` - Weight *string `json:"weight"` - RemoteSiteID *string `json:"remote_site_id"` + StoredID *string `json:"stored_id"` + Name *string `json:"name"` + Disambiguation *string `json:"disambiguation"` + Gender *string `json:"gender"` + URLs []string `json:"urls"` + URL *string `json:"url"` // deprecated + Twitter *string `json:"twitter"` // deprecated + Instagram *string `json:"instagram"` // deprecated + Birthdate *string `json:"birthdate"` + Ethnicity *string `json:"ethnicity"` + Country *string `json:"country"` + EyeColor *string `json:"eye_color"` + Height *string `json:"height"` + Measurements *string `json:"measurements"` + FakeTits *string `json:"fake_tits"` + PenisLength *string `json:"penis_length"` + Circumcised *string `json:"circumcised"` + CareerLength *string `json:"career_length"` + Tattoos *string `json:"tattoos"` + Piercings *string `json:"piercings"` + Aliases *string `json:"aliases"` + Details *string `json:"details"` + DeathDate *string `json:"death_date"` + HairColor *string `json:"hair_color"` + Weight *string `json:"weight"` + RemoteSiteID *string `json:"remote_site_id"` } diff --git a/pkg/scraper/postprocessing.go b/pkg/scraper/postprocessing.go index 0cf9b5a17fb..09a4657c3f2 100644 --- a/pkg/scraper/postprocessing.go +++ b/pkg/scraper/postprocessing.go @@ -6,6 +6,7 @@ import ( "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/match" "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/utils" ) // postScrape handles post-processing of scraped content. 
If the content @@ -38,6 +39,12 @@ func (c Cache) postScrape(ctx context.Context, content ScrapedContent) (ScrapedC } case models.ScrapedMovie: return c.postScrapeMovie(ctx, v) + case *models.ScrapedGroup: + if v != nil { + return c.postScrapeGroup(ctx, *v) + } + case models.ScrapedGroup: + return c.postScrapeGroup(ctx, v) } // If nothing matches, pass the content through @@ -67,17 +74,53 @@ func (c Cache) postScrapePerformer(ctx context.Context, p models.ScrapedPerforme p.Country = resolveCountryName(p.Country) + // populate URL/URLs + // if URLs are provided, only use those + if len(p.URLs) > 0 { + p.URL = &p.URLs[0] + } else { + urls := []string{} + if p.URL != nil { + urls = append(urls, *p.URL) + } + if p.Twitter != nil && *p.Twitter != "" { + // handle twitter profile names + u := utils.URLFromHandle(*p.Twitter, "https://twitter.com") + urls = append(urls, u) + } + if p.Instagram != nil && *p.Instagram != "" { + // handle instagram profile names + u := utils.URLFromHandle(*p.Instagram, "https://instagram.com") + urls = append(urls, u) + } + + if len(urls) > 0 { + p.URLs = urls + } + } + return p, nil } func (c Cache) postScrapeMovie(ctx context.Context, m models.ScrapedMovie) (ScrapedContent, error) { - if m.Studio != nil { - r := c.repository - if err := r.WithReadTxn(ctx, func(ctx context.Context) error { - return match.ScrapedStudio(ctx, r.StudioFinder, m.Studio, nil) - }); err != nil { - return nil, err + r := c.repository + if err := r.WithReadTxn(ctx, func(ctx context.Context) error { + tqb := r.TagFinder + tags, err := postProcessTags(ctx, tqb, m.Tags) + if err != nil { + return err } + m.Tags = tags + + if m.Studio != nil { + if err := match.ScrapedStudio(ctx, r.StudioFinder, m.Studio, nil); err != nil { + return err + } + } + + return nil + }); err != nil { + return nil, err } // post-process - set the image if applicable @@ -91,6 +134,38 @@ func (c Cache) postScrapeMovie(ctx context.Context, m models.ScrapedMovie) (Scra return m, nil } +func (c 
Cache) postScrapeGroup(ctx context.Context, m models.ScrapedGroup) (ScrapedContent, error) { + r := c.repository + if err := r.WithReadTxn(ctx, func(ctx context.Context) error { + tqb := r.TagFinder + tags, err := postProcessTags(ctx, tqb, m.Tags) + if err != nil { + return err + } + m.Tags = tags + + if m.Studio != nil { + if err := match.ScrapedStudio(ctx, r.StudioFinder, m.Studio, nil); err != nil { + return err + } + } + + return nil + }); err != nil { + return nil, err + } + + // post-process - set the image if applicable + if err := setGroupFrontImage(ctx, c.client, &m, c.globalConfig); err != nil { + logger.Warnf("could not set front image using URL %s: %v", *m.FrontImage, err) + } + if err := setGroupBackImage(ctx, c.client, &m, c.globalConfig); err != nil { + logger.Warnf("could not set back image using URL %s: %v", *m.BackImage, err) + } + + return m, nil +} + func (c Cache) postScrapeScenePerformer(ctx context.Context, p models.ScrapedPerformer) error { tqb := c.repository.TagFinder @@ -117,7 +192,7 @@ func (c Cache) postScrapeScene(ctx context.Context, scene ScrapedScene) (Scraped r := c.repository if err := r.WithReadTxn(ctx, func(ctx context.Context) error { pqb := r.PerformerFinder - mqb := r.MovieFinder + gqb := r.GroupFinder tqb := r.TagFinder sqb := r.StudioFinder @@ -136,10 +211,39 @@ func (c Cache) postScrapeScene(ctx context.Context, scene ScrapedScene) (Scraped } for _, p := range scene.Movies { - err := match.ScrapedMovie(ctx, mqb, p) + matchedID, err := match.ScrapedGroup(ctx, gqb, p.StoredID, p.Name) if err != nil { return err } + + if matchedID != nil { + p.StoredID = matchedID + } + } + + for _, p := range scene.Groups { + matchedID, err := match.ScrapedGroup(ctx, gqb, p.StoredID, p.Name) + if err != nil { + return err + } + + if matchedID != nil { + p.StoredID = matchedID + } + } + + // HACK - if movies was returned but not groups, add the groups from the movies + // if groups was returned but not movies, add the movies from the groups 
for backward compatibility + if len(scene.Movies) > 0 && len(scene.Groups) == 0 { + for _, m := range scene.Movies { + g := m.ScrapedGroup() + scene.Groups = append(scene.Groups, &g) + } + } else if len(scene.Groups) > 0 && len(scene.Movies) == 0 { + for _, g := range scene.Groups { + m := g.ScrapedMovie() + scene.Movies = append(scene.Movies, &m) + } } tags, err := postProcessTags(ctx, tqb, scene.Tags) diff --git a/pkg/scraper/scene.go b/pkg/scraper/scene.go index e5de74a23f1..1ffc20996bb 100644 --- a/pkg/scraper/scene.go +++ b/pkg/scraper/scene.go @@ -18,6 +18,7 @@ type ScrapedScene struct { Studio *models.ScrapedStudio `json:"studio"` Tags []*models.ScrapedTag `json:"tags"` Performers []*models.ScrapedPerformer `json:"performers"` + Groups []*models.ScrapedGroup `json:"groups"` Movies []*models.ScrapedMovie `json:"movies"` RemoteSiteID *string `json:"remote_site_id"` Duration *int `json:"duration"` diff --git a/pkg/scraper/scraper.go b/pkg/scraper/scraper.go index 23ad411bdb0..56c8f007398 100644 --- a/pkg/scraper/scraper.go +++ b/pkg/scraper/scraper.go @@ -1,3 +1,5 @@ +// Package scraper provides interfaces to interact with the scraper subsystem. +// The [Cache] type is the main entry point to the scraper subsystem. 
package scraper import ( @@ -31,6 +33,7 @@ type ScrapeContentType string const ( ScrapeContentTypeGallery ScrapeContentType = "GALLERY" ScrapeContentTypeMovie ScrapeContentType = "MOVIE" + ScrapeContentTypeGroup ScrapeContentType = "GROUP" ScrapeContentTypePerformer ScrapeContentType = "PERFORMER" ScrapeContentTypeScene ScrapeContentType = "SCENE" ) @@ -38,13 +41,14 @@ const ( var AllScrapeContentType = []ScrapeContentType{ ScrapeContentTypeGallery, ScrapeContentTypeMovie, + ScrapeContentTypeGroup, ScrapeContentTypePerformer, ScrapeContentTypeScene, } func (e ScrapeContentType) IsValid() bool { switch e { - case ScrapeContentTypeGallery, ScrapeContentTypeMovie, ScrapeContentTypePerformer, ScrapeContentTypeScene: + case ScrapeContentTypeGallery, ScrapeContentTypeMovie, ScrapeContentTypeGroup, ScrapeContentTypePerformer, ScrapeContentTypeScene: return true } return false @@ -81,6 +85,8 @@ type Scraper struct { // Details for gallery scraper Gallery *ScraperSpec `json:"gallery"` // Details for movie scraper + Group *ScraperSpec `json:"group"` + // Details for movie scraper Movie *ScraperSpec `json:"movie"` } @@ -163,6 +169,12 @@ func (i *Input) populateURL() { if i.Scene != nil && i.Scene.URL == nil && len(i.Scene.URLs) > 0 { i.Scene.URL = &i.Scene.URLs[0] } + if i.Gallery != nil && i.Gallery.URL == nil && len(i.Gallery.URLs) > 0 { + i.Gallery.URL = &i.Gallery.URLs[0] + } + if i.Performer != nil && i.Performer.URL == nil && len(i.Performer.URLs) > 0 { + i.Performer.URL = &i.Performer.URLs[0] + } } // simple type definitions that can help customize diff --git a/pkg/scraper/script.go b/pkg/scraper/script.go index bfb03ee3aad..bff78ac791b 100644 --- a/pkg/scraper/script.go +++ b/pkg/scraper/script.go @@ -8,14 +8,203 @@ import ( "io" "os/exec" "path/filepath" + "strconv" "strings" stashExec "github.com/stashapp/stash/pkg/exec" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" + stashJson "github.com/stashapp/stash/pkg/models/json" 
"github.com/stashapp/stash/pkg/python" ) +// inputs for scrapers + +type fingerprintInput struct { + Type string `json:"type,omitempty"` + Fingerprint string `json:"fingerprint,omitempty"` +} + +type fileInput struct { + ID string `json:"id"` + ZipFile *fileInput `json:"zip_file,omitempty"` + ModTime stashJson.JSONTime `json:"mod_time"` + + Path string `json:"path,omitempty"` + + Fingerprints []fingerprintInput `json:"fingerprints,omitempty"` + Size int64 `json:"size,omitempty"` +} + +type videoFileInput struct { + fileInput + Format string `json:"format,omitempty"` + Width int `json:"width,omitempty"` + Height int `json:"height,omitempty"` + Duration float64 `json:"duration,omitempty"` + VideoCodec string `json:"video_codec,omitempty"` + AudioCodec string `json:"audio_codec,omitempty"` + FrameRate float64 `json:"frame_rate,omitempty"` + BitRate int64 `json:"bitrate,omitempty"` + + Interactive bool `json:"interactive,omitempty"` + InteractiveSpeed *int `json:"interactive_speed,omitempty"` +} + +// sceneInput is the input passed to the scraper for an existing scene +type sceneInput struct { + ID string `json:"id"` + Title string `json:"title"` + Code string `json:"code,omitempty"` + + // deprecated - use urls instead + URL *string `json:"url"` + URLs []string `json:"urls"` + + // don't use omitempty for these to maintain backwards compatibility + Date *string `json:"date"` + Details string `json:"details"` + + Director string `json:"director,omitempty"` + + Files []videoFileInput `json:"files,omitempty"` +} + +func fileInputFromFile(f models.BaseFile) fileInput { + b := f.Base() + var z *fileInput + if b.ZipFile != nil { + zz := fileInputFromFile(*b.ZipFile.Base()) + z = &zz + } + + ret := fileInput{ + ID: f.ID.String(), + ZipFile: z, + ModTime: stashJson.JSONTime{Time: f.ModTime}, + Path: f.Path, + Size: f.Size, + } + + for _, fp := range f.Fingerprints { + ret.Fingerprints = append(ret.Fingerprints, fingerprintInput{ + Type: fp.Type, + Fingerprint: fp.Value(), + 
}) + } + + return ret +} + +func videoFileInputFromVideoFile(vf *models.VideoFile) videoFileInput { + return videoFileInput{ + fileInput: fileInputFromFile(*vf.Base()), + Format: vf.Format, + Width: vf.Width, + Height: vf.Height, + Duration: vf.Duration, + VideoCodec: vf.VideoCodec, + AudioCodec: vf.AudioCodec, + FrameRate: vf.FrameRate, + BitRate: vf.BitRate, + Interactive: vf.Interactive, + InteractiveSpeed: vf.InteractiveSpeed, + } +} + +func sceneInputFromScene(scene *models.Scene) sceneInput { + dateToStringPtr := func(s *models.Date) *string { + if s != nil { + v := s.String() + return &v + } + + return nil + } + + // fallback to file basename if title is empty + title := scene.GetTitle() + + var url *string + urls := scene.URLs.List() + if len(urls) > 0 { + url = &urls[0] + } + + ret := sceneInput{ + ID: strconv.Itoa(scene.ID), + Title: title, + Details: scene.Details, + // include deprecated URL for now + URL: url, + URLs: urls, + Date: dateToStringPtr(scene.Date), + Code: scene.Code, + Director: scene.Director, + } + + for _, f := range scene.Files.List() { + vf := videoFileInputFromVideoFile(f) + ret.Files = append(ret.Files, vf) + } + + return ret +} + +type galleryInput struct { + ID string `json:"id"` + Title string `json:"title"` + Urls []string `json:"urls"` + Date *string `json:"date"` + Details string `json:"details"` + + Code string `json:"code,omitempty"` + Photographer string `json:"photographer,omitempty"` + + Files []fileInput `json:"files,omitempty"` + + // deprecated + URL *string `json:"url"` +} + +func galleryInputFromGallery(gallery *models.Gallery) galleryInput { + dateToStringPtr := func(s *models.Date) *string { + if s != nil { + v := s.String() + return &v + } + + return nil + } + + // fallback to file basename if title is empty + title := gallery.GetTitle() + + var url *string + urls := gallery.URLs.List() + if len(urls) > 0 { + url = &urls[0] + } + + ret := galleryInput{ + ID: strconv.Itoa(gallery.ID), + Title: title, + Details: 
gallery.Details, + URL: url, + Urls: urls, + Date: dateToStringPtr(gallery.Date), + Code: gallery.Code, + Photographer: gallery.Photographer, + } + + for _, f := range gallery.Files.List() { + fi := fileInputFromFile(*f.Base()) + ret.Files = append(ret.Files, fi) + } + + return ret +} + var ErrScraperScript = errors.New("scraper script error") type scriptScraper struct { @@ -43,7 +232,7 @@ func (s *scriptScraper) runScraperScript(ctx context.Context, inString string, o if err != nil { logger.Warnf("%s", err) } else { - cmd = p.Command(context.TODO(), command[1:]) + cmd = p.Command(ctx, command[1:]) envVariable, _ := filepath.Abs(filepath.Dir(filepath.Dir(s.config.path))) python.AppendPythonPath(cmd, envVariable) } @@ -51,7 +240,7 @@ func (s *scriptScraper) runScraperScript(ctx context.Context, inString string, o if cmd == nil { // if could not find python, just use the command args as-is - cmd = stashExec.Command(command[0], command[1:]...) + cmd = stashExec.CommandContext(ctx, command[0], command[1:]...) 
} cmd.Dir = filepath.Dir(s.config.path) @@ -195,7 +384,7 @@ func (s *scriptScraper) scrape(ctx context.Context, input string, ty ScrapeConte var scene *ScrapedScene err := s.runScraperScript(ctx, input, &scene) return scene, err - case ScrapeContentTypeMovie: + case ScrapeContentTypeMovie, ScrapeContentTypeGroup: var movie *models.ScrapedMovie err := s.runScraperScript(ctx, input, &movie) return movie, err @@ -205,7 +394,7 @@ func (s *scriptScraper) scrape(ctx context.Context, input string, ty ScrapeConte } func (s *scriptScraper) scrapeSceneByScene(ctx context.Context, scene *models.Scene) (*ScrapedScene, error) { - inString, err := json.Marshal(sceneToUpdateInput(scene)) + inString, err := json.Marshal(sceneInputFromScene(scene)) if err != nil { return nil, err @@ -219,7 +408,7 @@ func (s *scriptScraper) scrapeSceneByScene(ctx context.Context, scene *models.Sc } func (s *scriptScraper) scrapeGalleryByGallery(ctx context.Context, gallery *models.Gallery) (*ScrapedGallery, error) { - inString, err := json.Marshal(galleryToUpdateInput(gallery)) + inString, err := json.Marshal(galleryInputFromGallery(gallery)) if err != nil { return nil, err diff --git a/pkg/scraper/stash.go b/pkg/scraper/stash.go index b7f483667a3..a50db8b5e61 100644 --- a/pkg/scraper/stash.go +++ b/pkg/scraper/stash.go @@ -4,7 +4,6 @@ import ( "context" "fmt" "net/http" - "strconv" "github.com/jinzhu/copier" "github.com/shurcooL/graphql" @@ -310,62 +309,3 @@ func (s *stashScraper) scrapeGalleryByGallery(ctx context.Context, gallery *mode func (s *stashScraper) scrapeByURL(_ context.Context, _ string, _ ScrapeContentType) (ScrapedContent, error) { return nil, ErrNotSupported } - -func sceneToUpdateInput(scene *models.Scene) models.SceneUpdateInput { - dateToStringPtr := func(s *models.Date) *string { - if s != nil { - v := s.String() - return &v - } - - return nil - } - - // fallback to file basename if title is empty - title := scene.GetTitle() - - var url *string - urls := scene.URLs.List() - if 
len(urls) > 0 { - url = &urls[0] - } - - return models.SceneUpdateInput{ - ID: strconv.Itoa(scene.ID), - Title: &title, - Details: &scene.Details, - // include deprecated URL for now - URL: url, - Urls: urls, - Date: dateToStringPtr(scene.Date), - } -} - -func galleryToUpdateInput(gallery *models.Gallery) models.GalleryUpdateInput { - dateToStringPtr := func(s *models.Date) *string { - if s != nil { - v := s.String() - return &v - } - - return nil - } - - // fallback to file basename if title is empty - title := gallery.GetTitle() - - var url *string - urls := gallery.URLs.List() - if len(urls) > 0 { - url = &urls[0] - } - - return models.GalleryUpdateInput{ - ID: strconv.Itoa(gallery.ID), - Title: &title, - Details: &gallery.Details, - URL: url, - Urls: urls, - Date: dateToStringPtr(gallery.Date), - } -} diff --git a/pkg/scraper/stashbox/stash_box.go b/pkg/scraper/stashbox/stash_box.go index c833d3d0ca7..0b0cf68d67e 100644 --- a/pkg/scraper/stashbox/stash_box.go +++ b/pkg/scraper/stashbox/stash_box.go @@ -1,3 +1,4 @@ +// Package stashbox provides a client interface to a stash-box server instance. 
package stashbox import ( @@ -9,7 +10,6 @@ import ( "io" "mime/multipart" "net/http" - "regexp" "strconv" "strings" @@ -41,6 +41,7 @@ type PerformerReader interface { match.PerformerFinder models.AliasLoader models.StashIDLoader + models.URLLoader FindBySceneID(ctx context.Context, sceneID int) ([]*models.Performer, error) GetImage(ctx context.Context, performerID int) ([]byte, error) } @@ -251,12 +252,14 @@ func (c Client) findStashBoxScenesByFingerprints(ctx context.Context, scenes [][ return ret, nil } -func (c Client) SubmitStashBoxFingerprints(ctx context.Context, sceneIDs []string, endpoint string) (bool, error) { +func (c Client) SubmitStashBoxFingerprints(ctx context.Context, sceneIDs []string) (bool, error) { ids, err := stringslice.StringSliceToIntSlice(sceneIDs) if err != nil { return false, err } + endpoint := c.box.Endpoint + var fingerprints []graphql.FingerprintSubmission r := c.repository @@ -683,6 +686,10 @@ func performerFragmentToScrapedPerformer(p graphql.PerformerFragment) *models.Sc sp.Aliases = &alias } + for _, u := range p.Urls { + sp.URLs = append(sp.URLs, u.URL) + } + return sp } @@ -945,12 +952,13 @@ func appendFingerprintUnique(v []*graphql.FingerprintInput, toAdd *graphql.Finge return append(v, toAdd) } -func (c Client) SubmitSceneDraft(ctx context.Context, scene *models.Scene, endpoint string, cover []byte) (*string, error) { +func (c Client) SubmitSceneDraft(ctx context.Context, scene *models.Scene, cover []byte) (*string, error) { draft := graphql.SceneDraftInput{} var image io.Reader r := c.repository pqb := r.Performer sqb := r.Studio + endpoint := c.box.Endpoint if scene.Title != "" { draft.Title = &scene.Title @@ -1115,15 +1123,20 @@ func (c Client) SubmitSceneDraft(ctx context.Context, scene *models.Scene, endpo // return id, nil } -func (c Client) SubmitPerformerDraft(ctx context.Context, performer *models.Performer, endpoint string) (*string, error) { +func (c Client) SubmitPerformerDraft(ctx context.Context, performer 
*models.Performer) (*string, error) { draft := graphql.PerformerDraftInput{} var image io.Reader pqb := c.repository.Performer + endpoint := c.box.Endpoint if err := performer.LoadAliases(ctx, pqb); err != nil { return nil, err } + if err := performer.LoadURLs(ctx, pqb); err != nil { + return nil, err + } + if err := performer.LoadStashIDs(ctx, pqb); err != nil { return nil, err } @@ -1191,28 +1204,8 @@ func (c Client) SubmitPerformerDraft(ctx context.Context, performer *models.Perf } } - var urls []string - if len(strings.TrimSpace(performer.Twitter)) > 0 { - reg := regexp.MustCompile(`https?:\/\/(?:www\.)?twitter\.com`) - if reg.MatchString(performer.Twitter) { - urls = append(urls, strings.TrimSpace(performer.Twitter)) - } else { - urls = append(urls, "https://twitter.com/"+strings.TrimSpace(performer.Twitter)) - } - } - if len(strings.TrimSpace(performer.Instagram)) > 0 { - reg := regexp.MustCompile(`https?:\/\/(?:www\.)?instagram\.com`) - if reg.MatchString(performer.Instagram) { - urls = append(urls, strings.TrimSpace(performer.Instagram)) - } else { - urls = append(urls, "https://instagram.com/"+strings.TrimSpace(performer.Instagram)) - } - } - if len(strings.TrimSpace(performer.URL)) > 0 { - urls = append(urls, strings.TrimSpace(performer.URL)) - } - if len(urls) > 0 { - draft.Urls = urls + if len(performer.URLs.List()) > 0 { + draft.Urls = performer.URLs.List() } stashIDs, err := pqb.GetStashIDs(ctx, performer.ID) @@ -1346,7 +1339,7 @@ func (c *Client) submitDraft(ctx context.Context, query string, input interface{ return fmt.Errorf("failed to decode data %s: %w", string(responseBytes), err) } - if respGQL.Errors != nil && len(respGQL.Errors) > 0 { + if len(respGQL.Errors) > 0 { // try to parse standard graphql error errors := &client.GqlErrorList{} if e := json.Unmarshal(responseBytes, errors); e != nil { diff --git a/pkg/scraper/xpath.go b/pkg/scraper/xpath.go index 29a4b0a1926..299e9b5db69 100644 --- a/pkg/scraper/xpath.go +++ b/pkg/scraper/xpath.go @@ 
-62,15 +62,33 @@ func (s *xpathScraper) scrapeByURL(ctx context.Context, url string, ty ScrapeCon } q := s.getXPathQuery(doc) + // if these just return the return values from scraper.scrape* functions then + // it ends up returning ScrapedContent(nil) rather than nil switch ty { case ScrapeContentTypePerformer: - return scraper.scrapePerformer(ctx, q) + ret, err := scraper.scrapePerformer(ctx, q) + if err != nil || ret == nil { + return nil, err + } + return ret, nil case ScrapeContentTypeScene: - return scraper.scrapeScene(ctx, q) + ret, err := scraper.scrapeScene(ctx, q) + if err != nil || ret == nil { + return nil, err + } + return ret, nil case ScrapeContentTypeGallery: - return scraper.scrapeGallery(ctx, q) - case ScrapeContentTypeMovie: - return scraper.scrapeMovie(ctx, q) + ret, err := scraper.scrapeGallery(ctx, q) + if err != nil || ret == nil { + return nil, err + } + return ret, nil + case ScrapeContentTypeMovie, ScrapeContentTypeGroup: + ret, err := scraper.scrapeGroup(ctx, q) + if err != nil || ret == nil { + return nil, err + } + return ret, nil } return nil, ErrNotSupported diff --git a/pkg/session/session.go b/pkg/session/session.go index 285c7cc3c7a..66cb39e0923 100644 --- a/pkg/session/session.go +++ b/pkg/session/session.go @@ -1,3 +1,4 @@ +// Package session provides session authentication and management for the application. package session import ( diff --git a/pkg/sliceutil/collections.go b/pkg/sliceutil/collections.go index 81a3deba37c..18930df259e 100644 --- a/pkg/sliceutil/collections.go +++ b/pkg/sliceutil/collections.go @@ -1,3 +1,4 @@ +// Package sliceutil provides utilities for working with slices. package sliceutil // Index returns the first index of the provided value in the provided @@ -145,7 +146,7 @@ func Filter[T any](vs []T, f func(T) bool) []T { return ret } -// Filter returns the result of applying f to each element of the vs slice. +// Map returns the result of applying f to each element of the vs slice. 
func Map[T any, V any](vs []T, f func(T) V) []V { ret := make([]V, len(vs)) for i, v := range vs { diff --git a/pkg/sqlite/anonymise.go b/pkg/sqlite/anonymise.go index 7e4efd70299..78c5f4ab1a6 100644 --- a/pkg/sqlite/anonymise.go +++ b/pkg/sqlite/anonymise.go @@ -47,6 +47,8 @@ func (db *Anonymiser) Anonymise(ctx context.Context) error { return utils.Do([]func() error{ func() error { return db.deleteBlobs() }, func() error { return db.deleteStashIDs() }, + func() error { return db.clearOHistory() }, + func() error { return db.clearWatchHistory() }, func() error { return db.anonymiseFolders(ctx) }, func() error { return db.anonymiseFiles(ctx) }, func() error { return db.anonymiseFingerprints(ctx) }, @@ -57,7 +59,7 @@ func (db *Anonymiser) Anonymise(ctx context.Context) error { func() error { return db.anonymisePerformers(ctx) }, func() error { return db.anonymiseStudios(ctx) }, func() error { return db.anonymiseTags(ctx) }, - func() error { return db.anonymiseMovies(ctx) }, + func() error { return db.anonymiseGroups(ctx) }, func() error { return db.Optimise(ctx) }, }) }(); err != nil { @@ -82,14 +84,14 @@ func (db *Anonymiser) truncateTable(tableName string) error { func (db *Anonymiser) deleteBlobs() error { return utils.Do([]func() error{ - func() error { return db.truncateColumn("tags", "image_blob") }, - func() error { return db.truncateColumn("studios", "image_blob") }, - func() error { return db.truncateColumn("performers", "image_blob") }, - func() error { return db.truncateColumn("scenes", "cover_blob") }, - func() error { return db.truncateColumn("movies", "front_image_blob") }, - func() error { return db.truncateColumn("movies", "back_image_blob") }, - - func() error { return db.truncateTable("blobs") }, + func() error { return db.truncateColumn(tagTable, tagImageBlobColumn) }, + func() error { return db.truncateColumn(studioTable, studioImageBlobColumn) }, + func() error { return db.truncateColumn(performerTable, performerImageBlobColumn) }, + func() error 
{ return db.truncateColumn(sceneTable, sceneCoverBlobColumn) }, + func() error { return db.truncateColumn(groupTable, groupFrontImageBlobColumn) }, + func() error { return db.truncateColumn(groupTable, groupBackImageBlobColumn) }, + + func() error { return db.truncateTable(blobTable) }, }) } @@ -101,6 +103,18 @@ func (db *Anonymiser) deleteStashIDs() error { }) } +func (db *Anonymiser) clearOHistory() error { + return utils.Do([]func() error{ + func() error { return db.truncateTable(scenesODatesTable) }, + }) +} + +func (db *Anonymiser) clearWatchHistory() error { + return utils.Do([]func() error{ + func() error { return db.truncateTable(scenesViewDatesTable) }, + }) +} + func (db *Anonymiser) anonymiseFolders(ctx context.Context) error { logger.Infof("Anonymising folders") return txn.WithTxn(ctx, db, func(ctx context.Context) error { @@ -495,9 +509,6 @@ func (db *Anonymiser) anonymisePerformers(ctx context.Context) error { table.Col(idColumn), table.Col("name"), table.Col("details"), - table.Col("url"), - table.Col("twitter"), - table.Col("instagram"), table.Col("tattoos"), table.Col("piercings"), ).Where(table.Col(idColumn).Gt(lastID)).Limit(1000) @@ -510,9 +521,6 @@ func (db *Anonymiser) anonymisePerformers(ctx context.Context) error { id int name sql.NullString details sql.NullString - url sql.NullString - twitter sql.NullString - instagram sql.NullString tattoos sql.NullString piercings sql.NullString ) @@ -521,9 +529,6 @@ func (db *Anonymiser) anonymisePerformers(ctx context.Context) error { &id, &name, &details, - &url, - &twitter, - &instagram, &tattoos, &piercings, ); err != nil { @@ -533,9 +538,6 @@ func (db *Anonymiser) anonymisePerformers(ctx context.Context) error { set := goqu.Record{} db.obfuscateNullString(set, "name", name) db.obfuscateNullString(set, "details", details) - db.obfuscateNullString(set, "url", url) - db.obfuscateNullString(set, "twitter", twitter) - db.obfuscateNullString(set, "instagram", instagram) db.obfuscateNullString(set, 
"tattoos", tattoos) db.obfuscateNullString(set, "piercings", piercings) @@ -566,6 +568,10 @@ func (db *Anonymiser) anonymisePerformers(ctx context.Context) error { return err } + if err := db.anonymiseURLs(ctx, goqu.T(performerURLsTable), "performer_id"); err != nil { + return err + } + return nil } @@ -833,9 +839,9 @@ func (db *Anonymiser) anonymiseTags(ctx context.Context) error { return nil } -func (db *Anonymiser) anonymiseMovies(ctx context.Context) error { - logger.Infof("Anonymising movies") - table := movieTableMgr.table +func (db *Anonymiser) anonymiseGroups(ctx context.Context) error { + logger.Infof("Anonymising groups") + table := groupTableMgr.table lastID := 0 total := 0 const logEvery = 10000 @@ -846,8 +852,7 @@ func (db *Anonymiser) anonymiseMovies(ctx context.Context) error { table.Col(idColumn), table.Col("name"), table.Col("aliases"), - table.Col("synopsis"), - table.Col("url"), + table.Col("description"), table.Col("director"), ).Where(table.Col(idColumn).Gt(lastID)).Limit(1000) @@ -856,20 +861,18 @@ func (db *Anonymiser) anonymiseMovies(ctx context.Context) error { const single = false return queryFunc(ctx, query, single, func(rows *sqlx.Rows) error { var ( - id int - name sql.NullString - aliases sql.NullString - synopsis sql.NullString - url sql.NullString - director sql.NullString + id int + name sql.NullString + aliases sql.NullString + description sql.NullString + director sql.NullString ) if err := rows.Scan( &id, &name, &aliases, - &synopsis, - &url, + &description, &director, ); err != nil { return err @@ -878,8 +881,7 @@ func (db *Anonymiser) anonymiseMovies(ctx context.Context) error { set := goqu.Record{} db.obfuscateNullString(set, "name", name) db.obfuscateNullString(set, "aliases", aliases) - db.obfuscateNullString(set, "synopsis", synopsis) - db.obfuscateNullString(set, "url", url) + db.obfuscateNullString(set, "description", description) db.obfuscateNullString(set, "director", director) if len(set) > 0 { @@ -895,7 +897,7 @@ func 
(db *Anonymiser) anonymiseMovies(ctx context.Context) error { total++ if total%logEvery == 0 { - logger.Infof("Anonymised %d movies", total) + logger.Infof("Anonymised %d groups", total) } return nil @@ -905,6 +907,10 @@ func (db *Anonymiser) anonymiseMovies(ctx context.Context) error { } } + if err := db.anonymiseURLs(ctx, goqu.T(groupURLsTable), "group_id"); err != nil { + return err + } + return nil } diff --git a/pkg/sqlite/blob.go b/pkg/sqlite/blob.go index 31b406fc552..241b63d23cf 100644 --- a/pkg/sqlite/blob.go +++ b/pkg/sqlite/blob.go @@ -346,8 +346,8 @@ func (qb *BlobStore) delete(ctx context.Context, checksum string) error { } type blobJoinQueryBuilder struct { - repository - blobStore *BlobStore + repository repository + blobStore *BlobStore joinTable string } @@ -381,7 +381,7 @@ func (qb *blobJoinQueryBuilder) UpdateImage(ctx context.Context, id int, blobCol } sqlQuery := fmt.Sprintf("UPDATE %s SET %s = ? WHERE id = ?", qb.joinTable, blobCol) - if _, err := qb.tx.Exec(ctx, sqlQuery, checksum, id); err != nil { + if _, err := dbWrapper.Exec(ctx, sqlQuery, checksum, id); err != nil { return err } @@ -428,7 +428,7 @@ func (qb *blobJoinQueryBuilder) DestroyImage(ctx context.Context, id int, blobCo } updateQuery := fmt.Sprintf("UPDATE %s SET %s = NULL WHERE id = ?", qb.joinTable, blobCol) - if _, err = qb.tx.Exec(ctx, updateQuery, id); err != nil { + if _, err = dbWrapper.Exec(ctx, updateQuery, id); err != nil { return err } @@ -441,7 +441,7 @@ func (qb *blobJoinQueryBuilder) HasImage(ctx context.Context, id int, blobCol st "joinCol": blobCol, }) - c, err := qb.runCountQuery(ctx, stmt, []interface{}{id}) + c, err := qb.repository.runCountQuery(ctx, stmt, []interface{}{id}) if err != nil { return false, err } diff --git a/pkg/sqlite/blob_test.go b/pkg/sqlite/blob_test.go index 4c6e0ccc277..10c2b93fe4b 100644 --- a/pkg/sqlite/blob_test.go +++ b/pkg/sqlite/blob_test.go @@ -12,7 +12,7 @@ import ( ) type updateImageFunc func(ctx context.Context, id int, image 
[]byte) error -type getImageFunc func(ctx context.Context, movieID int) ([]byte, error) +type getImageFunc func(ctx context.Context, id int) ([]byte, error) func testUpdateImage(t *testing.T, ctx context.Context, id int, updateFn updateImageFunc, getFn getImageFunc) error { image := []byte("image") diff --git a/pkg/sqlite/criterion_handlers.go b/pkg/sqlite/criterion_handlers.go index 5718947cbe8..e021bd1759b 100644 --- a/pkg/sqlite/criterion_handlers.go +++ b/pkg/sqlite/criterion_handlers.go @@ -2,13 +2,308 @@ package sqlite import ( "context" + "database/sql" "fmt" + "path/filepath" + "regexp" + "strconv" + "strings" "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/utils" ) +type criterionHandler interface { + handle(ctx context.Context, f *filterBuilder) +} + +type criterionHandlerFunc func(ctx context.Context, f *filterBuilder) + +func (h criterionHandlerFunc) handle(ctx context.Context, f *filterBuilder) { + h(ctx, f) +} + +type compoundHandler []criterionHandler + +func (h compoundHandler) handle(ctx context.Context, f *filterBuilder) { + for _, h := range h { + h.handle(ctx, f) + } +} + // shared criterion handlers go here +func stringCriterionHandler(c *models.StringCriterionInput, column string) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if c != nil { + if modifier := c.Modifier; c.Modifier.IsValid() { + switch modifier { + case models.CriterionModifierIncludes: + f.whereClauses = append(f.whereClauses, getStringSearchClause([]string{column}, c.Value, false)) + case models.CriterionModifierExcludes: + f.whereClauses = append(f.whereClauses, getStringSearchClause([]string{column}, c.Value, true)) + case models.CriterionModifierEquals: + f.addWhere(column+" LIKE ?", c.Value) + case models.CriterionModifierNotEquals: + f.addWhere(column+" NOT LIKE ?", c.Value) + case models.CriterionModifierMatchesRegex: + if _, err := regexp.Compile(c.Value); err != nil { + f.setError(err) + return + } + 
f.addWhere(fmt.Sprintf("(%s IS NOT NULL AND %[1]s regexp ?)", column), c.Value) + case models.CriterionModifierNotMatchesRegex: + if _, err := regexp.Compile(c.Value); err != nil { + f.setError(err) + return + } + f.addWhere(fmt.Sprintf("(%s IS NULL OR %[1]s NOT regexp ?)", column), c.Value) + case models.CriterionModifierIsNull: + f.addWhere("(" + column + " IS NULL OR TRIM(" + column + ") = '')") + case models.CriterionModifierNotNull: + f.addWhere("(" + column + " IS NOT NULL AND TRIM(" + column + ") != '')") + default: + panic("unsupported string filter modifier") + } + } + } + } +} + +func enumCriterionHandler(modifier models.CriterionModifier, values []string, column string) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if modifier.IsValid() { + switch modifier { + case models.CriterionModifierIncludes, models.CriterionModifierEquals: + if len(values) > 0 { + f.whereClauses = append(f.whereClauses, getEnumSearchClause(column, values, false)) + } + case models.CriterionModifierExcludes, models.CriterionModifierNotEquals: + if len(values) > 0 { + f.whereClauses = append(f.whereClauses, getEnumSearchClause(column, values, true)) + } + case models.CriterionModifierIsNull: + f.addWhere("(" + column + " IS NULL OR TRIM(" + column + ") = '')") + case models.CriterionModifierNotNull: + f.addWhere("(" + column + " IS NOT NULL AND TRIM(" + column + ") != '')") + default: + panic("unsupported string filter modifier") + } + } + } +} + +func pathCriterionHandler(c *models.StringCriterionInput, pathColumn string, basenameColumn string, addJoinFn func(f *filterBuilder)) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if c != nil { + if addJoinFn != nil { + addJoinFn(f) + } + addWildcards := true + not := false + + if modifier := c.Modifier; c.Modifier.IsValid() { + switch modifier { + case models.CriterionModifierIncludes: + f.whereClauses = append(f.whereClauses, getPathSearchClauseMany(pathColumn, 
basenameColumn, c.Value, addWildcards, not)) + case models.CriterionModifierExcludes: + not = true + f.whereClauses = append(f.whereClauses, getPathSearchClauseMany(pathColumn, basenameColumn, c.Value, addWildcards, not)) + case models.CriterionModifierEquals: + addWildcards = false + f.whereClauses = append(f.whereClauses, getPathSearchClause(pathColumn, basenameColumn, c.Value, addWildcards, not)) + case models.CriterionModifierNotEquals: + addWildcards = false + not = true + f.whereClauses = append(f.whereClauses, getPathSearchClause(pathColumn, basenameColumn, c.Value, addWildcards, not)) + case models.CriterionModifierMatchesRegex: + if _, err := regexp.Compile(c.Value); err != nil { + f.setError(err) + return + } + filepathColumn := fmt.Sprintf("%s || '%s' || %s", pathColumn, string(filepath.Separator), basenameColumn) + f.addWhere(fmt.Sprintf("%s IS NOT NULL AND %s IS NOT NULL AND %s regexp ?", pathColumn, basenameColumn, filepathColumn), c.Value) + case models.CriterionModifierNotMatchesRegex: + if _, err := regexp.Compile(c.Value); err != nil { + f.setError(err) + return + } + filepathColumn := fmt.Sprintf("%s || '%s' || %s", pathColumn, string(filepath.Separator), basenameColumn) + f.addWhere(fmt.Sprintf("%s IS NULL OR %s IS NULL OR %s NOT regexp ?", pathColumn, basenameColumn, filepathColumn), c.Value) + case models.CriterionModifierIsNull: + f.addWhere(fmt.Sprintf("%s IS NULL OR TRIM(%[1]s) = '' OR %s IS NULL OR TRIM(%[2]s) = ''", pathColumn, basenameColumn)) + case models.CriterionModifierNotNull: + f.addWhere(fmt.Sprintf("%s IS NOT NULL AND TRIM(%[1]s) != '' AND %s IS NOT NULL AND TRIM(%[2]s) != ''", pathColumn, basenameColumn)) + default: + panic("unsupported string filter modifier") + } + } + } + } +} + +func getPathSearchClause(pathColumn, basenameColumn, p string, addWildcards, not bool) sqlClause { + if addWildcards { + p = "%" + p + "%" + } + + filepathColumn := fmt.Sprintf("%s || '%s' || %s", pathColumn, string(filepath.Separator), 
basenameColumn) + ret := makeClause(fmt.Sprintf("%s LIKE ?", filepathColumn), p) + + if not { + ret = ret.not() + } + + return ret +} + +// getPathSearchClauseMany splits the query string p on whitespace +// Used for backwards compatibility for the includes/excludes modifiers +func getPathSearchClauseMany(pathColumn, basenameColumn, p string, addWildcards, not bool) sqlClause { + q := strings.TrimSpace(p) + trimmedQuery := strings.Trim(q, "\"") + + if trimmedQuery == q { + q = regexp.MustCompile(`\s+`).ReplaceAllString(q, " ") + queryWords := strings.Split(q, " ") + + var ret []sqlClause + // Search for any word + for _, word := range queryWords { + ret = append(ret, getPathSearchClause(pathColumn, basenameColumn, word, addWildcards, not)) + } + + if !not { + return orClauses(ret...) + } + + return andClauses(ret...) + } + + return getPathSearchClause(pathColumn, basenameColumn, trimmedQuery, addWildcards, not) +} + +func intCriterionHandler(c *models.IntCriterionInput, column string, addJoinFn func(f *filterBuilder)) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if c != nil { + if addJoinFn != nil { + addJoinFn(f) + } + clause, args := getIntCriterionWhereClause(column, *c) + f.addWhere(clause, args...) + } + } +} + +func floatCriterionHandler(c *models.FloatCriterionInput, column string, addJoinFn func(f *filterBuilder)) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if c != nil { + if addJoinFn != nil { + addJoinFn(f) + } + clause, args := getFloatCriterionWhereClause(column, *c) + f.addWhere(clause, args...) 
+ } + } +} + +func floatIntCriterionHandler(durationFilter *models.IntCriterionInput, column string, addJoinFn func(f *filterBuilder)) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if durationFilter != nil { + if addJoinFn != nil { + addJoinFn(f) + } + clause, args := getIntCriterionWhereClause("cast("+column+" as int)", *durationFilter) + f.addWhere(clause, args...) + } + } +} + +func boolCriterionHandler(c *bool, column string, addJoinFn func(f *filterBuilder)) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if c != nil { + if addJoinFn != nil { + addJoinFn(f) + } + var v string + if *c { + v = "1" + } else { + v = "0" + } + + f.addWhere(column + " = " + v) + } + } +} + +type dateCriterionHandler struct { + c *models.DateCriterionInput + column string + joinFn func(f *filterBuilder) +} + +func (h *dateCriterionHandler) handle(ctx context.Context, f *filterBuilder) { + if h.c != nil { + if h.joinFn != nil { + h.joinFn(f) + } + clause, args := getDateCriterionWhereClause(h.column, *h.c) + f.addWhere(clause, args...) + } +} + +type timestampCriterionHandler struct { + c *models.TimestampCriterionInput + column string + joinFn func(f *filterBuilder) +} + +func (h *timestampCriterionHandler) handle(ctx context.Context, f *filterBuilder) { + if h.c != nil { + if h.joinFn != nil { + h.joinFn(f) + } + clause, args := getTimestampCriterionWhereClause(h.column, *h.c) + f.addWhere(clause, args...) + } +} + +func yearFilterCriterionHandler(year *models.IntCriterionInput, col string) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if year != nil && year.Modifier.IsValid() { + clause, args := getIntCriterionWhereClause("cast(strftime('%Y', "+col+") as int)", *year) + f.addWhere(clause, args...) 
+ } + } +} + +func resolutionCriterionHandler(resolution *models.ResolutionCriterionInput, heightColumn string, widthColumn string, addJoinFn func(f *filterBuilder)) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if resolution != nil && resolution.Value.IsValid() { + if addJoinFn != nil { + addJoinFn(f) + } + + min := resolution.Value.GetMinResolution() + max := resolution.Value.GetMaxResolution() + + widthHeight := fmt.Sprintf("MIN(%s, %s)", widthColumn, heightColumn) + + switch resolution.Modifier { + case models.CriterionModifierEquals: + f.addWhere(fmt.Sprintf("%s BETWEEN %d AND %d", widthHeight, min, max)) + case models.CriterionModifierNotEquals: + f.addWhere(fmt.Sprintf("%s NOT BETWEEN %d AND %d", widthHeight, min, max)) + case models.CriterionModifierLessThan: + f.addWhere(fmt.Sprintf("%s < %d", widthHeight, min)) + case models.CriterionModifierGreaterThan: + f.addWhere(fmt.Sprintf("%s > %d", widthHeight, max)) + } + } + } +} + func orientationCriterionHandler(orientation *models.OrientationCriterionInput, heightColumn string, widthColumn string, addJoinFn func(f *filterBuilder)) criterionHandlerFunc { return func(ctx context.Context, f *filterBuilder) { if orientation != nil { @@ -41,3 +336,723 @@ func orientationCriterionHandler(orientation *models.OrientationCriterionInput, } } } + +// handle for MultiCriterion where there is a join table between the new +// objects +type joinedMultiCriterionHandlerBuilder struct { + // table containing the primary objects + primaryTable string + // table joining primary and foreign objects + joinTable string + // alias for join table, if required + joinAs string + // foreign key of the primary object on the join table + primaryFK string + // foreign key of the foreign object on the join table + foreignFK string + + addJoinTable func(f *filterBuilder) +} + +func (m *joinedMultiCriterionHandlerBuilder) handler(c *models.MultiCriterionInput) criterionHandlerFunc { + return func(ctx 
context.Context, f *filterBuilder) { + if c != nil { + // make local copy so we can modify it + criterion := *c + + joinAlias := m.joinAs + if joinAlias == "" { + joinAlias = m.joinTable + } + + if criterion.Modifier == models.CriterionModifierIsNull || criterion.Modifier == models.CriterionModifierNotNull { + var notClause string + if criterion.Modifier == models.CriterionModifierNotNull { + notClause = "NOT" + } + + m.addJoinTable(f) + + f.addWhere(utils.StrFormat("{table}.{column} IS {not} NULL", utils.StrFormatMap{ + "table": joinAlias, + "column": m.foreignFK, + "not": notClause, + })) + return + } + + if len(criterion.Value) == 0 && len(criterion.Excludes) == 0 { + return + } + + // combine excludes if excludes modifier is selected + if criterion.Modifier == models.CriterionModifierExcludes { + criterion.Modifier = models.CriterionModifierIncludesAll + criterion.Excludes = append(criterion.Excludes, criterion.Value...) + criterion.Value = nil + } + + if len(criterion.Value) > 0 { + whereClause := "" + havingClause := "" + + var args []interface{} + for _, tagID := range criterion.Value { + args = append(args, tagID) + } + + switch criterion.Modifier { + case models.CriterionModifierIncludes: + // includes any of the provided ids + m.addJoinTable(f) + whereClause = fmt.Sprintf("%s.%s IN %s", joinAlias, m.foreignFK, getInBinding(len(criterion.Value))) + case models.CriterionModifierEquals: + // includes only the provided ids + m.addJoinTable(f) + whereClause = utils.StrFormat("{joinAlias}.{foreignFK} IN {inBinding} AND (SELECT COUNT(*) FROM {joinTable} s WHERE s.{primaryFK} = {primaryTable}.id) = ?", utils.StrFormatMap{ + "joinAlias": joinAlias, + "foreignFK": m.foreignFK, + "inBinding": getInBinding(len(criterion.Value)), + "joinTable": m.joinTable, + "primaryFK": m.primaryFK, + "primaryTable": m.primaryTable, + }) + havingClause = fmt.Sprintf("count(distinct %s.%s) IS %d", joinAlias, m.foreignFK, len(criterion.Value)) + args = append(args, 
len(criterion.Value)) + case models.CriterionModifierNotEquals: + f.setError(fmt.Errorf("not equals modifier is not supported for multi criterion input")) + case models.CriterionModifierIncludesAll: + // includes all of the provided ids + m.addJoinTable(f) + whereClause = fmt.Sprintf("%s.%s IN %s", joinAlias, m.foreignFK, getInBinding(len(criterion.Value))) + havingClause = fmt.Sprintf("count(distinct %s.%s) IS %d", joinAlias, m.foreignFK, len(criterion.Value)) + } + + f.addWhere(whereClause, args...) + f.addHaving(havingClause) + } + + if len(criterion.Excludes) > 0 { + var args []interface{} + for _, tagID := range criterion.Excludes { + args = append(args, tagID) + } + + // excludes all of the provided ids + // need to use actual join table name for this + // .id NOT IN (select . from where . in ) + whereClause := fmt.Sprintf("%[1]s.id NOT IN (SELECT %[3]s.%[2]s from %[3]s where %[3]s.%[4]s in %[5]s)", m.primaryTable, m.primaryFK, m.joinTable, m.foreignFK, getInBinding(len(criterion.Excludes))) + + f.addWhere(whereClause, args...) 
+ } + } + } +} + +type multiCriterionHandlerBuilder struct { + primaryTable string + foreignTable string + joinTable string + primaryFK string + foreignFK string + + // function that will be called to perform any necessary joins + addJoinsFunc func(f *filterBuilder) +} + +func (m *multiCriterionHandlerBuilder) handler(criterion *models.MultiCriterionInput) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if criterion != nil { + if criterion.Modifier == models.CriterionModifierIsNull || criterion.Modifier == models.CriterionModifierNotNull { + var notClause string + if criterion.Modifier == models.CriterionModifierNotNull { + notClause = "NOT" + } + + table := m.primaryTable + if m.joinTable != "" { + table = m.joinTable + f.addLeftJoin(table, "", fmt.Sprintf("%s.%s = %s.id", table, m.primaryFK, m.primaryTable)) + } + + f.addWhere(fmt.Sprintf("%s.%s IS %s NULL", table, m.foreignFK, notClause)) + return + } + + if len(criterion.Value) == 0 { + return + } + + var args []interface{} + for _, tagID := range criterion.Value { + args = append(args, tagID) + } + + if m.addJoinsFunc != nil { + m.addJoinsFunc(f) + } + + whereClause, havingClause := getMultiCriterionClause(m.primaryTable, m.foreignTable, m.joinTable, m.primaryFK, m.foreignFK, criterion) + f.addWhere(whereClause, args...) + f.addHaving(havingClause) + } + } +} + +type countCriterionHandlerBuilder struct { + primaryTable string + joinTable string + primaryFK string +} + +func (m *countCriterionHandlerBuilder) handler(criterion *models.IntCriterionInput) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if criterion != nil { + clause, args := getCountCriterionClause(m.primaryTable, m.joinTable, m.primaryFK, *criterion) + + f.addWhere(clause, args...) 
+ } + } +} + +// handler for StringCriterion for string list fields +type stringListCriterionHandlerBuilder struct { + primaryTable string + // foreign key of the primary object on the join table + primaryFK string + // table joining primary and foreign objects + joinTable string + // string field on the join table + stringColumn string + + addJoinTable func(f *filterBuilder) + excludeHandler func(f *filterBuilder, criterion *models.StringCriterionInput) +} + +func (m *stringListCriterionHandlerBuilder) handler(criterion *models.StringCriterionInput) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if criterion != nil { + if criterion.Modifier == models.CriterionModifierExcludes { + // special handling for excludes + if m.excludeHandler != nil { + m.excludeHandler(f, criterion) + return + } + + // excludes all of the provided values + // need to use actual join table name for this + // .id NOT IN (select . from where . in ) + whereClause := utils.StrFormat("{primaryTable}.id NOT IN (SELECT {joinTable}.{primaryFK} from {joinTable} where {joinTable}.{stringColumn} LIKE ?)", + utils.StrFormatMap{ + "primaryTable": m.primaryTable, + "joinTable": m.joinTable, + "primaryFK": m.primaryFK, + "stringColumn": m.stringColumn, + }, + ) + + f.addWhere(whereClause, "%"+criterion.Value+"%") + + // TODO - should we also exclude null values? 
+ // m.addJoinTable(f) + // stringCriterionHandler(&models.StringCriterionInput{ + // Modifier: models.CriterionModifierNotNull, + // }, m.joinTable+"."+m.stringColumn)(ctx, f) + } else { + m.addJoinTable(f) + stringCriterionHandler(criterion, m.joinTable+"."+m.stringColumn)(ctx, f) + } + } + } +} + +func studioCriterionHandler(primaryTable string, studios *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if studios == nil { + return + } + + studiosCopy := *studios + switch studiosCopy.Modifier { + case models.CriterionModifierEquals: + studiosCopy.Modifier = models.CriterionModifierIncludesAll + case models.CriterionModifierNotEquals: + studiosCopy.Modifier = models.CriterionModifierExcludes + } + + hh := hierarchicalMultiCriterionHandlerBuilder{ + primaryTable: primaryTable, + foreignTable: studioTable, + foreignFK: studioIDColumn, + parentFK: "parent_id", + } + + hh.handler(&studiosCopy)(ctx, f) + } +} + +type hierarchicalMultiCriterionHandlerBuilder struct { + primaryTable string + foreignTable string + foreignFK string + + parentFK string + childFK string + relationsTable string +} + +func getHierarchicalValues(ctx context.Context, values []string, table, relationsTable, parentFK string, childFK string, depth *int) (string, error) { + var args []interface{} + + if parentFK == "" { + parentFK = "parent_id" + } + if childFK == "" { + childFK = "child_id" + } + + depthVal := 0 + if depth != nil { + depthVal = *depth + } + + if depthVal == 0 { + valid := true + var valuesClauses []string + for _, value := range values { + id, err := strconv.Atoi(value) + // In case of invalid value just run the query. + // Building VALUES() based on provided values just saves a query when depth is 0. 
+ if err != nil { + valid = false + break + } + + valuesClauses = append(valuesClauses, fmt.Sprintf("(%d,%d)", id, id)) + } + + if valid { + return "VALUES" + strings.Join(valuesClauses, ","), nil + } + } + + for _, value := range values { + args = append(args, value) + } + inCount := len(args) + + var depthCondition string + if depthVal != -1 { + depthCondition = fmt.Sprintf("WHERE depth < %d", depthVal) + } + + withClauseMap := utils.StrFormatMap{ + "table": table, + "relationsTable": relationsTable, + "inBinding": getInBinding(inCount), + "recursiveSelect": "", + "parentFK": parentFK, + "childFK": childFK, + "depthCondition": depthCondition, + "unionClause": "", + } + + if relationsTable != "" { + withClauseMap["recursiveSelect"] = utils.StrFormat(`SELECT p.root_id, c.{childFK}, depth + 1 FROM {relationsTable} AS c +INNER JOIN items as p ON c.{parentFK} = p.item_id +`, withClauseMap) + } else { + withClauseMap["recursiveSelect"] = utils.StrFormat(`SELECT p.root_id, c.id, depth + 1 FROM {table} as c +INNER JOIN items as p ON c.{parentFK} = p.item_id +`, withClauseMap) + } + + if depthVal != 0 { + withClauseMap["unionClause"] = utils.StrFormat(` +UNION {recursiveSelect} {depthCondition} +`, withClauseMap) + } + + withClause := utils.StrFormat(`items AS ( +SELECT id as root_id, id as item_id, 0 as depth FROM {table} +WHERE id in {inBinding} +{unionClause}) +`, withClauseMap) + + query := fmt.Sprintf("WITH RECURSIVE %s SELECT 'VALUES' || GROUP_CONCAT('(' || root_id || ', ' || item_id || ')') AS val FROM items", withClause) + + var valuesClause sql.NullString + err := dbWrapper.Get(ctx, &valuesClause, query, args...) 
+ if err != nil { + return "", fmt.Errorf("failed to get hierarchical values: %w", err) + } + + // if no values are found, just return a values string with the values only + if !valuesClause.Valid { + for i, value := range values { + values[i] = fmt.Sprintf("(%s, %s)", value, value) + } + valuesClause.String = "VALUES" + strings.Join(values, ",") + } + + return valuesClause.String, nil +} + +func addHierarchicalConditionClauses(f *filterBuilder, criterion models.HierarchicalMultiCriterionInput, table, idColumn string) { + switch criterion.Modifier { + case models.CriterionModifierIncludes: + f.addWhere(fmt.Sprintf("%s.%s IS NOT NULL", table, idColumn)) + case models.CriterionModifierIncludesAll: + f.addWhere(fmt.Sprintf("%s.%s IS NOT NULL", table, idColumn)) + f.addHaving(fmt.Sprintf("count(distinct %s.%s) IS %d", table, idColumn, len(criterion.Value))) + case models.CriterionModifierExcludes: + f.addWhere(fmt.Sprintf("%s.%s IS NULL", table, idColumn)) + } +} + +func (m *hierarchicalMultiCriterionHandlerBuilder) handler(c *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if c != nil { + // make a copy so we don't modify the original + criterion := *c + + // don't support equals/not equals + if criterion.Modifier == models.CriterionModifierEquals || criterion.Modifier == models.CriterionModifierNotEquals { + f.setError(fmt.Errorf("modifier %s is not supported for hierarchical multi criterion", criterion.Modifier)) + return + } + + if criterion.Modifier == models.CriterionModifierIsNull || criterion.Modifier == models.CriterionModifierNotNull { + var notClause string + if criterion.Modifier == models.CriterionModifierNotNull { + notClause = "NOT" + } + + f.addWhere(utils.StrFormat("{table}.{column} IS {not} NULL", utils.StrFormatMap{ + "table": m.primaryTable, + "column": m.foreignFK, + "not": notClause, + })) + return + } + + if len(criterion.Value) == 0 && len(criterion.Excludes) == 0 { + return 
+ } + + // combine excludes if excludes modifier is selected + if criterion.Modifier == models.CriterionModifierExcludes { + criterion.Modifier = models.CriterionModifierIncludesAll + criterion.Excludes = append(criterion.Excludes, criterion.Value...) + criterion.Value = nil + } + + if len(criterion.Value) > 0 { + valuesClause, err := getHierarchicalValues(ctx, criterion.Value, m.foreignTable, m.relationsTable, m.parentFK, m.childFK, criterion.Depth) + if err != nil { + f.setError(err) + return + } + + switch criterion.Modifier { + case models.CriterionModifierIncludes: + f.addWhere(fmt.Sprintf("%s.%s IN (SELECT column2 FROM (%s))", m.primaryTable, m.foreignFK, valuesClause)) + case models.CriterionModifierIncludesAll: + f.addWhere(fmt.Sprintf("%s.%s IN (SELECT column2 FROM (%s))", m.primaryTable, m.foreignFK, valuesClause)) + f.addHaving(fmt.Sprintf("count(distinct %s.%s) IS %d", m.primaryTable, m.foreignFK, len(criterion.Value))) + } + } + + if len(criterion.Excludes) > 0 { + valuesClause, err := getHierarchicalValues(ctx, criterion.Excludes, m.foreignTable, m.relationsTable, m.parentFK, m.childFK, criterion.Depth) + if err != nil { + f.setError(err) + return + } + + f.addWhere(fmt.Sprintf("%s.%s NOT IN (SELECT column2 FROM (%s)) OR %[1]s.%[2]s IS NULL", m.primaryTable, m.foreignFK, valuesClause)) + } + } + } +} + +type joinedHierarchicalMultiCriterionHandlerBuilder struct { + primaryTable string + primaryKey string + foreignTable string + foreignFK string + + parentFK string + childFK string + relationsTable string + + joinAs string + joinTable string + primaryFK string +} + +func (m *joinedHierarchicalMultiCriterionHandlerBuilder) addHierarchicalConditionClauses(f *filterBuilder, criterion models.HierarchicalMultiCriterionInput, table, idColumn string) { + primaryKey := m.primaryKey + if primaryKey == "" { + primaryKey = "id" + } + + switch criterion.Modifier { + case models.CriterionModifierEquals: + // includes only the provided ids + 
f.addWhere(fmt.Sprintf("%s.%s IS NOT NULL", table, idColumn)) + f.addHaving(fmt.Sprintf("count(distinct %s.%s) IS %d", table, idColumn, len(criterion.Value))) + f.addWhere(utils.StrFormat("(SELECT COUNT(*) FROM {joinTable} s WHERE s.{primaryFK} = {primaryTable}.{primaryKey}) = ?", utils.StrFormatMap{ + "joinTable": m.joinTable, + "primaryFK": m.primaryFK, + "primaryTable": m.primaryTable, + "primaryKey": primaryKey, + }), len(criterion.Value)) + case models.CriterionModifierNotEquals: + f.setError(fmt.Errorf("not equals modifier is not supported for hierarchical multi criterion input")) + default: + addHierarchicalConditionClauses(f, criterion, table, idColumn) + } +} + +func (m *joinedHierarchicalMultiCriterionHandlerBuilder) handler(c *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if c != nil { + // make a copy so we don't modify the original + criterion := *c + joinAlias := m.joinAs + primaryKey := m.primaryKey + if primaryKey == "" { + primaryKey = "id" + } + + if criterion.Modifier == models.CriterionModifierEquals && criterion.Depth != nil && *criterion.Depth != 0 { + f.setError(fmt.Errorf("depth is not supported for equals modifier in hierarchical multi criterion input")) + return + } + + if criterion.Modifier == models.CriterionModifierIsNull || criterion.Modifier == models.CriterionModifierNotNull { + var notClause string + if criterion.Modifier == models.CriterionModifierNotNull { + notClause = "NOT" + } + + f.addLeftJoin(m.joinTable, joinAlias, fmt.Sprintf("%s.%s = %s.%s", joinAlias, m.primaryFK, m.primaryTable, primaryKey)) + + f.addWhere(utils.StrFormat("{table}.{column} IS {not} NULL", utils.StrFormatMap{ + "table": joinAlias, + "column": m.foreignFK, + "not": notClause, + })) + return + } + + // combine excludes if excludes modifier is selected + if criterion.Modifier == models.CriterionModifierExcludes { + criterion.Modifier = models.CriterionModifierIncludesAll + 
criterion.Excludes = append(criterion.Excludes, criterion.Value...) + criterion.Value = nil + } + + if len(criterion.Value) == 0 && len(criterion.Excludes) == 0 { + return + } + + if len(criterion.Value) > 0 { + valuesClause, err := getHierarchicalValues(ctx, criterion.Value, m.foreignTable, m.relationsTable, m.parentFK, m.childFK, criterion.Depth) + if err != nil { + f.setError(err) + return + } + + joinTable := utils.StrFormat(`( + SELECT j.*, d.column1 AS root_id, d.column2 AS item_id FROM {joinTable} AS j + INNER JOIN ({valuesClause}) AS d ON j.{foreignFK} = d.column2 + ) + `, utils.StrFormatMap{ + "joinTable": m.joinTable, + "foreignFK": m.foreignFK, + "valuesClause": valuesClause, + }) + + f.addLeftJoin(joinTable, joinAlias, fmt.Sprintf("%s.%s = %s.%s", joinAlias, m.primaryFK, m.primaryTable, primaryKey)) + + m.addHierarchicalConditionClauses(f, criterion, joinAlias, "root_id") + } + + if len(criterion.Excludes) > 0 { + valuesClause, err := getHierarchicalValues(ctx, criterion.Excludes, m.foreignTable, m.relationsTable, m.parentFK, m.childFK, criterion.Depth) + if err != nil { + f.setError(err) + return + } + + joinTable := utils.StrFormat(`( + SELECT j2.*, e.column1 AS root_id, e.column2 AS item_id FROM {joinTable} AS j2 + INNER JOIN ({valuesClause}) AS e ON j2.{foreignFK} = e.column2 + ) + `, utils.StrFormatMap{ + "joinTable": m.joinTable, + "foreignFK": m.foreignFK, + "valuesClause": valuesClause, + }) + + joinAlias2 := joinAlias + "2" + + f.addLeftJoin(joinTable, joinAlias2, fmt.Sprintf("%s.%s = %s.%s", joinAlias2, m.primaryFK, m.primaryTable, primaryKey)) + + // modify for exclusion + criterionCopy := criterion + criterionCopy.Modifier = models.CriterionModifierExcludes + criterionCopy.Value = c.Excludes + + m.addHierarchicalConditionClauses(f, criterionCopy, joinAlias2, "root_id") + } + } + } +} + +type joinedPerformerTagsHandler struct { + criterion *models.HierarchicalMultiCriterionInput + + primaryTable string // eg scenes + joinTable string // eg 
performers_scenes + joinPrimaryKey string // eg scene_id +} + +func (h *joinedPerformerTagsHandler) handle(ctx context.Context, f *filterBuilder) { + tags := h.criterion + + if tags != nil { + criterion := tags.CombineExcludes() + + // validate the modifier + switch criterion.Modifier { + case models.CriterionModifierIncludesAll, models.CriterionModifierIncludes, models.CriterionModifierExcludes, models.CriterionModifierIsNull, models.CriterionModifierNotNull: + // valid + default: + f.setError(fmt.Errorf("invalid modifier %s for performer tags", criterion.Modifier)) + } + + strFormatMap := utils.StrFormatMap{ + "primaryTable": h.primaryTable, + "joinTable": h.joinTable, + "joinPrimaryKey": h.joinPrimaryKey, + "inBinding": getInBinding(len(criterion.Value)), + } + + if criterion.Modifier == models.CriterionModifierIsNull || criterion.Modifier == models.CriterionModifierNotNull { + var notClause string + if criterion.Modifier == models.CriterionModifierNotNull { + notClause = "NOT" + } + + f.addLeftJoin(h.joinTable, "", utils.StrFormat("{primaryTable}.id = {joinTable}.{joinPrimaryKey}", strFormatMap)) + f.addLeftJoin("performers_tags", "", utils.StrFormat("{joinTable}.performer_id = performers_tags.performer_id", strFormatMap)) + + f.addWhere(fmt.Sprintf("performers_tags.tag_id IS %s NULL", notClause)) + return + } + + if len(criterion.Value) == 0 && len(criterion.Excludes) == 0 { + return + } + + if len(criterion.Value) > 0 { + valuesClause, err := getHierarchicalValues(ctx, criterion.Value, tagTable, "tags_relations", "", "", criterion.Depth) + if err != nil { + f.setError(err) + return + } + + f.addWith(utils.StrFormat(`performer_tags AS ( +SELECT ps.{joinPrimaryKey} as primaryID, t.column1 AS root_tag_id FROM {joinTable} ps +INNER JOIN performers_tags pt ON pt.performer_id = ps.performer_id +INNER JOIN (`+valuesClause+`) t ON t.column2 = pt.tag_id +)`, strFormatMap)) + + f.addLeftJoin("performer_tags", "", utils.StrFormat("performer_tags.primaryID = 
{primaryTable}.id", strFormatMap)) + + addHierarchicalConditionClauses(f, criterion, "performer_tags", "root_tag_id") + } + + if len(criterion.Excludes) > 0 { + valuesClause, err := getHierarchicalValues(ctx, criterion.Excludes, tagTable, "tags_relations", "", "", criterion.Depth) + if err != nil { + f.setError(err) + return + } + + clause := utils.StrFormat("{primaryTable}.id NOT IN (SELECT {joinTable}.{joinPrimaryKey} FROM {joinTable} INNER JOIN performers_tags ON {joinTable}.performer_id = performers_tags.performer_id WHERE performers_tags.tag_id IN (SELECT column2 FROM (%s)))", strFormatMap) + f.addWhere(fmt.Sprintf(clause, valuesClause)) + } + } +} + +type stashIDCriterionHandler struct { + c *models.StashIDCriterionInput + stashIDRepository *stashIDRepository + stashIDTableAs string + parentIDCol string +} + +func (h *stashIDCriterionHandler) handle(ctx context.Context, f *filterBuilder) { + if h.c == nil { + return + } + + stashIDRepo := h.stashIDRepository + t := stashIDRepo.tableName + if h.stashIDTableAs != "" { + t = h.stashIDTableAs + } + + joinClause := fmt.Sprintf("%s.%s = %s", t, stashIDRepo.idColumn, h.parentIDCol) + if h.c.Endpoint != nil && *h.c.Endpoint != "" { + joinClause += fmt.Sprintf(" AND %s.endpoint = '%s'", t, *h.c.Endpoint) + } + + f.addLeftJoin(stashIDRepo.tableName, h.stashIDTableAs, joinClause) + + v := "" + if h.c.StashID != nil { + v = *h.c.StashID + } + + stringCriterionHandler(&models.StringCriterionInput{ + Value: v, + Modifier: h.c.Modifier, + }, t+".stash_id")(ctx, f) +} + +type relatedFilterHandler struct { + relatedIDCol string + relatedRepo repository + relatedHandler criterionHandler + joinFn func(f *filterBuilder) +} + +func (h *relatedFilterHandler) handle(ctx context.Context, f *filterBuilder) { + ff := filterBuilderFromHandler(ctx, h.relatedHandler) + if ff.err != nil { + f.setError(ff.err) + return + } + + if ff.empty() { + return + } + + subQuery := h.relatedRepo.newQuery() + selectIDs(&subQuery, 
subQuery.repository.tableName) + if err := subQuery.addFilter(ff); err != nil { + f.setError(err) + return + } + + if h.joinFn != nil { + h.joinFn(f) + } + + f.addWhere(fmt.Sprintf("%s IN ("+subQuery.toSQL(false)+")", h.relatedIDCol), subQuery.args...) +} diff --git a/pkg/sqlite/database.go b/pkg/sqlite/database.go index 90d3706a544..7dd4771d33f 100644 --- a/pkg/sqlite/database.go +++ b/pkg/sqlite/database.go @@ -30,7 +30,7 @@ const ( dbConnTimeout = 30 ) -var appSchemaVersion uint = 58 +var appSchemaVersion uint = 67 //go:embed migrations/*.sql var migrationsBox embed.FS @@ -61,7 +61,7 @@ func (e *MismatchedSchemaVersionError) Error() string { return fmt.Sprintf("schema version %d is incompatible with required schema version %d", e.CurrentSchemaVersion, e.RequiredSchemaVersion) } -type Database struct { +type storeRepository struct { Blobs *BlobStore File *FileStore Folder *FolderStore @@ -74,7 +74,11 @@ type Database struct { SavedFilter *SavedFilterStore Studio *StudioStore Tag *TagStore - Movie *MovieStore + Group *GroupStore +} + +type Database struct { + *storeRepository db *sqlx.DB dbPath string @@ -87,23 +91,32 @@ type Database struct { func NewDatabase() *Database { fileStore := NewFileStore() folderStore := NewFolderStore() + galleryStore := NewGalleryStore(fileStore, folderStore) blobStore := NewBlobStore(BlobStoreOptions{}) + performerStore := NewPerformerStore(blobStore) + studioStore := NewStudioStore(blobStore) + tagStore := NewTagStore(blobStore) - ret := &Database{ + r := &storeRepository{} + *r = storeRepository{ Blobs: blobStore, File: fileStore, Folder: folderStore, - Scene: NewSceneStore(fileStore, blobStore), + Scene: NewSceneStore(r, blobStore), SceneMarker: NewSceneMarkerStore(), - Image: NewImageStore(fileStore), - Gallery: NewGalleryStore(fileStore, folderStore), + Image: NewImageStore(r), + Gallery: galleryStore, GalleryChapter: NewGalleryChapterStore(), - Performer: NewPerformerStore(blobStore), - Studio: NewStudioStore(blobStore), - 
Tag: NewTagStore(blobStore), - Movie: NewMovieStore(blobStore), + Performer: performerStore, + Studio: studioStore, + Tag: tagStore, + Group: NewGroupStore(blobStore), SavedFilter: NewSavedFilterStore(), - lockChan: make(chan struct{}, 1), + } + + ret := &Database{ + storeRepository: r, + lockChan: make(chan struct{}, 1), } return ret @@ -370,7 +383,7 @@ func (db *Database) Analyze(ctx context.Context) error { } func (db *Database) ExecSQL(ctx context.Context, query string, args []interface{}) (*int64, *int64, error) { - wrapper := dbWrapper{} + wrapper := dbWrapperType{} result, err := wrapper.Exec(ctx, query, args...) if err != nil { @@ -393,7 +406,7 @@ func (db *Database) ExecSQL(ctx context.Context, query string, args []interface{ } func (db *Database) QuerySQL(ctx context.Context, query string, args []interface{}) ([]string, [][]interface{}, error) { - wrapper := dbWrapper{} + wrapper := dbWrapperType{} rows, err := wrapper.QueryxContext(ctx, query, args...) if err != nil && !errors.Is(err, sql.ErrNoRows) { diff --git a/pkg/sqlite/doc.go b/pkg/sqlite/doc.go new file mode 100644 index 00000000000..36472189627 --- /dev/null +++ b/pkg/sqlite/doc.go @@ -0,0 +1,2 @@ +// Package sqlite provides interfaces to interact with the sqlite database. 
+package sqlite diff --git a/pkg/sqlite/file.go b/pkg/sqlite/file.go index c071320c6bf..6cd74eb34cd 100644 --- a/pkg/sqlite/file.go +++ b/pkg/sqlite/file.go @@ -947,7 +947,6 @@ func (qb *FileStore) setQuerySort(query *queryBuilder, findFilter *models.FindFi func (qb *FileStore) captionRepository() *captionRepository { return &captionRepository{ repository: repository{ - tx: qb.tx, tableName: videoCaptionsTable, idColumn: fileIDColumn, }, diff --git a/pkg/sqlite/filter.go b/pkg/sqlite/filter.go index abf3336a7a3..f4b5e7e7726 100644 --- a/pkg/sqlite/filter.go +++ b/pkg/sqlite/filter.go @@ -2,19 +2,55 @@ package sqlite import ( "context" - "database/sql" "errors" "fmt" - "path/filepath" - "regexp" - "strconv" "strings" - "github.com/stashapp/stash/pkg/utils" - "github.com/stashapp/stash/pkg/models" ) +func illegalFilterCombination(type1, type2 string) error { + return fmt.Errorf("cannot have %s and %s in the same filter", type1, type2) +} + +func validateFilterCombination[T any](sf models.OperatorFilter[T]) error { + const and = "AND" + const or = "OR" + const not = "NOT" + + if sf.And != nil { + if sf.Or != nil { + return illegalFilterCombination(and, or) + } + if sf.Not != nil { + return illegalFilterCombination(and, not) + } + } + + if sf.Or != nil { + if sf.Not != nil { + return illegalFilterCombination(or, not) + } + } + + return nil +} + +func handleSubFilter[T any](ctx context.Context, handler criterionHandler, f *filterBuilder, subFilter models.OperatorFilter[T]) { + subQuery := &filterBuilder{} + handler.handle(ctx, subQuery) + + if subFilter.And != nil { + f.and(subQuery) + } + if subFilter.Or != nil { + f.or(subQuery) + } + if subFilter.Not != nil { + f.not(subQuery) + } +} + type sqlClause struct { sql string args []interface{} @@ -54,16 +90,6 @@ func andClauses(clauses ...sqlClause) sqlClause { return joinClauses("AND", clauses...) 
} -type criterionHandler interface { - handle(ctx context.Context, f *filterBuilder) -} - -type criterionHandlerFunc func(ctx context.Context, f *filterBuilder) - -func (h criterionHandlerFunc) handle(ctx context.Context, f *filterBuilder) { - h(ctx, f) -} - type join struct { table string as string @@ -143,6 +169,16 @@ type filterBuilder struct { err error } +func (f *filterBuilder) empty() bool { + return f == nil || (len(f.whereClauses) == 0 && len(f.joins) == 0 && len(f.havingClauses) == 0 && f.subFilter == nil) +} + +func filterBuilderFromHandler(ctx context.Context, handler criterionHandler) *filterBuilder { + f := &filterBuilder{} + handler.handle(ctx, f) + return f +} + var errSubFilterAlreadySet = errors.New(`sub-filter already set`) // sub-filter operator values @@ -388,876 +424,3 @@ func (f *filterBuilder) andClauses(input []sqlClause) (string, []interface{}) { return "", nil } - -func stringCriterionHandler(c *models.StringCriterionInput, column string) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if c != nil { - if modifier := c.Modifier; c.Modifier.IsValid() { - switch modifier { - case models.CriterionModifierIncludes: - f.whereClauses = append(f.whereClauses, getStringSearchClause([]string{column}, c.Value, false)) - case models.CriterionModifierExcludes: - f.whereClauses = append(f.whereClauses, getStringSearchClause([]string{column}, c.Value, true)) - case models.CriterionModifierEquals: - f.addWhere(column+" LIKE ?", c.Value) - case models.CriterionModifierNotEquals: - f.addWhere(column+" NOT LIKE ?", c.Value) - case models.CriterionModifierMatchesRegex: - if _, err := regexp.Compile(c.Value); err != nil { - f.setError(err) - return - } - f.addWhere(fmt.Sprintf("(%s IS NOT NULL AND %[1]s regexp ?)", column), c.Value) - case models.CriterionModifierNotMatchesRegex: - if _, err := regexp.Compile(c.Value); err != nil { - f.setError(err) - return - } - f.addWhere(fmt.Sprintf("(%s IS NULL OR %[1]s NOT regexp ?)", 
column), c.Value) - case models.CriterionModifierIsNull: - f.addWhere("(" + column + " IS NULL OR TRIM(" + column + ") = '')") - case models.CriterionModifierNotNull: - f.addWhere("(" + column + " IS NOT NULL AND TRIM(" + column + ") != '')") - default: - panic("unsupported string filter modifier") - } - } - } - } -} - -func enumCriterionHandler(modifier models.CriterionModifier, values []string, column string) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if modifier.IsValid() { - switch modifier { - case models.CriterionModifierIncludes, models.CriterionModifierEquals: - if len(values) > 0 { - f.whereClauses = append(f.whereClauses, getEnumSearchClause(column, values, false)) - } - case models.CriterionModifierExcludes, models.CriterionModifierNotEquals: - if len(values) > 0 { - f.whereClauses = append(f.whereClauses, getEnumSearchClause(column, values, true)) - } - case models.CriterionModifierIsNull: - f.addWhere("(" + column + " IS NULL OR TRIM(" + column + ") = '')") - case models.CriterionModifierNotNull: - f.addWhere("(" + column + " IS NOT NULL AND TRIM(" + column + ") != '')") - default: - panic("unsupported string filter modifier") - } - } - } -} - -func pathCriterionHandler(c *models.StringCriterionInput, pathColumn string, basenameColumn string, addJoinFn func(f *filterBuilder)) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if c != nil { - if addJoinFn != nil { - addJoinFn(f) - } - addWildcards := true - not := false - - if modifier := c.Modifier; c.Modifier.IsValid() { - switch modifier { - case models.CriterionModifierIncludes: - f.whereClauses = append(f.whereClauses, getPathSearchClauseMany(pathColumn, basenameColumn, c.Value, addWildcards, not)) - case models.CriterionModifierExcludes: - not = true - f.whereClauses = append(f.whereClauses, getPathSearchClauseMany(pathColumn, basenameColumn, c.Value, addWildcards, not)) - case models.CriterionModifierEquals: - addWildcards = false 
- f.whereClauses = append(f.whereClauses, getPathSearchClause(pathColumn, basenameColumn, c.Value, addWildcards, not)) - case models.CriterionModifierNotEquals: - addWildcards = false - not = true - f.whereClauses = append(f.whereClauses, getPathSearchClause(pathColumn, basenameColumn, c.Value, addWildcards, not)) - case models.CriterionModifierMatchesRegex: - if _, err := regexp.Compile(c.Value); err != nil { - f.setError(err) - return - } - filepathColumn := fmt.Sprintf("%s || '%s' || %s", pathColumn, string(filepath.Separator), basenameColumn) - f.addWhere(fmt.Sprintf("%s IS NOT NULL AND %s IS NOT NULL AND %s regexp ?", pathColumn, basenameColumn, filepathColumn), c.Value) - case models.CriterionModifierNotMatchesRegex: - if _, err := regexp.Compile(c.Value); err != nil { - f.setError(err) - return - } - filepathColumn := fmt.Sprintf("%s || '%s' || %s", pathColumn, string(filepath.Separator), basenameColumn) - f.addWhere(fmt.Sprintf("%s IS NULL OR %s IS NULL OR %s NOT regexp ?", pathColumn, basenameColumn, filepathColumn), c.Value) - case models.CriterionModifierIsNull: - f.addWhere(fmt.Sprintf("%s IS NULL OR TRIM(%[1]s) = '' OR %s IS NULL OR TRIM(%[2]s) = ''", pathColumn, basenameColumn)) - case models.CriterionModifierNotNull: - f.addWhere(fmt.Sprintf("%s IS NOT NULL AND TRIM(%[1]s) != '' AND %s IS NOT NULL AND TRIM(%[2]s) != ''", pathColumn, basenameColumn)) - default: - panic("unsupported string filter modifier") - } - } - } - } -} - -func getPathSearchClause(pathColumn, basenameColumn, p string, addWildcards, not bool) sqlClause { - if addWildcards { - p = "%" + p + "%" - } - - filepathColumn := fmt.Sprintf("%s || '%s' || %s", pathColumn, string(filepath.Separator), basenameColumn) - ret := makeClause(fmt.Sprintf("%s LIKE ?", filepathColumn), p) - - if not { - ret = ret.not() - } - - return ret -} - -// getPathSearchClauseMany splits the query string p on whitespace -// Used for backwards compatibility for the includes/excludes modifiers -func 
getPathSearchClauseMany(pathColumn, basenameColumn, p string, addWildcards, not bool) sqlClause { - q := strings.TrimSpace(p) - trimmedQuery := strings.Trim(q, "\"") - - if trimmedQuery == q { - q = regexp.MustCompile(`\s+`).ReplaceAllString(q, " ") - queryWords := strings.Split(q, " ") - - var ret []sqlClause - // Search for any word - for _, word := range queryWords { - ret = append(ret, getPathSearchClause(pathColumn, basenameColumn, word, addWildcards, not)) - } - - if !not { - return orClauses(ret...) - } - - return andClauses(ret...) - } - - return getPathSearchClause(pathColumn, basenameColumn, trimmedQuery, addWildcards, not) -} - -func intCriterionHandler(c *models.IntCriterionInput, column string, addJoinFn func(f *filterBuilder)) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if c != nil { - if addJoinFn != nil { - addJoinFn(f) - } - clause, args := getIntCriterionWhereClause(column, *c) - f.addWhere(clause, args...) - } - } -} - -func floatCriterionHandler(c *models.FloatCriterionInput, column string, addJoinFn func(f *filterBuilder)) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if c != nil { - if addJoinFn != nil { - addJoinFn(f) - } - clause, args := getFloatCriterionWhereClause(column, *c) - f.addWhere(clause, args...) - } - } -} - -func boolCriterionHandler(c *bool, column string, addJoinFn func(f *filterBuilder)) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if c != nil { - if addJoinFn != nil { - addJoinFn(f) - } - var v string - if *c { - v = "1" - } else { - v = "0" - } - - f.addWhere(column + " = " + v) - } - } -} - -func dateCriterionHandler(c *models.DateCriterionInput, column string) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if c != nil { - clause, args := getDateCriterionWhereClause(column, *c) - f.addWhere(clause, args...) 
- } - } -} - -func timestampCriterionHandler(c *models.TimestampCriterionInput, column string) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if c != nil { - clause, args := getTimestampCriterionWhereClause(column, *c) - f.addWhere(clause, args...) - } - } -} - -// handle for MultiCriterion where there is a join table between the new -// objects -type joinedMultiCriterionHandlerBuilder struct { - // table containing the primary objects - primaryTable string - // table joining primary and foreign objects - joinTable string - // alias for join table, if required - joinAs string - // foreign key of the primary object on the join table - primaryFK string - // foreign key of the foreign object on the join table - foreignFK string - - addJoinTable func(f *filterBuilder) -} - -func (m *joinedMultiCriterionHandlerBuilder) handler(c *models.MultiCriterionInput) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if c != nil { - // make local copy so we can modify it - criterion := *c - - joinAlias := m.joinAs - if joinAlias == "" { - joinAlias = m.joinTable - } - - if criterion.Modifier == models.CriterionModifierIsNull || criterion.Modifier == models.CriterionModifierNotNull { - var notClause string - if criterion.Modifier == models.CriterionModifierNotNull { - notClause = "NOT" - } - - m.addJoinTable(f) - - f.addWhere(utils.StrFormat("{table}.{column} IS {not} NULL", utils.StrFormatMap{ - "table": joinAlias, - "column": m.foreignFK, - "not": notClause, - })) - return - } - - if len(criterion.Value) == 0 && len(criterion.Excludes) == 0 { - return - } - - // combine excludes if excludes modifier is selected - if criterion.Modifier == models.CriterionModifierExcludes { - criterion.Modifier = models.CriterionModifierIncludesAll - criterion.Excludes = append(criterion.Excludes, criterion.Value...) 
- criterion.Value = nil - } - - if len(criterion.Value) > 0 { - whereClause := "" - havingClause := "" - - var args []interface{} - for _, tagID := range criterion.Value { - args = append(args, tagID) - } - - switch criterion.Modifier { - case models.CriterionModifierIncludes: - // includes any of the provided ids - m.addJoinTable(f) - whereClause = fmt.Sprintf("%s.%s IN %s", joinAlias, m.foreignFK, getInBinding(len(criterion.Value))) - case models.CriterionModifierEquals: - // includes only the provided ids - m.addJoinTable(f) - whereClause = utils.StrFormat("{joinAlias}.{foreignFK} IN {inBinding} AND (SELECT COUNT(*) FROM {joinTable} s WHERE s.{primaryFK} = {primaryTable}.id) = ?", utils.StrFormatMap{ - "joinAlias": joinAlias, - "foreignFK": m.foreignFK, - "inBinding": getInBinding(len(criterion.Value)), - "joinTable": m.joinTable, - "primaryFK": m.primaryFK, - "primaryTable": m.primaryTable, - }) - havingClause = fmt.Sprintf("count(distinct %s.%s) IS %d", joinAlias, m.foreignFK, len(criterion.Value)) - args = append(args, len(criterion.Value)) - case models.CriterionModifierNotEquals: - f.setError(fmt.Errorf("not equals modifier is not supported for multi criterion input")) - case models.CriterionModifierIncludesAll: - // includes all of the provided ids - m.addJoinTable(f) - whereClause = fmt.Sprintf("%s.%s IN %s", joinAlias, m.foreignFK, getInBinding(len(criterion.Value))) - havingClause = fmt.Sprintf("count(distinct %s.%s) IS %d", joinAlias, m.foreignFK, len(criterion.Value)) - } - - f.addWhere(whereClause, args...) - f.addHaving(havingClause) - } - - if len(criterion.Excludes) > 0 { - var args []interface{} - for _, tagID := range criterion.Excludes { - args = append(args, tagID) - } - - // excludes all of the provided ids - // need to use actual join table name for this - // .id NOT IN (select . from where . 
in ) - whereClause := fmt.Sprintf("%[1]s.id NOT IN (SELECT %[3]s.%[2]s from %[3]s where %[3]s.%[4]s in %[5]s)", m.primaryTable, m.primaryFK, m.joinTable, m.foreignFK, getInBinding(len(criterion.Excludes))) - - f.addWhere(whereClause, args...) - } - } - } -} - -type multiCriterionHandlerBuilder struct { - primaryTable string - foreignTable string - joinTable string - primaryFK string - foreignFK string - - // function that will be called to perform any necessary joins - addJoinsFunc func(f *filterBuilder) -} - -func (m *multiCriterionHandlerBuilder) handler(criterion *models.MultiCriterionInput) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if criterion != nil { - if criterion.Modifier == models.CriterionModifierIsNull || criterion.Modifier == models.CriterionModifierNotNull { - var notClause string - if criterion.Modifier == models.CriterionModifierNotNull { - notClause = "NOT" - } - - table := m.primaryTable - if m.joinTable != "" { - table = m.joinTable - f.addLeftJoin(table, "", fmt.Sprintf("%s.%s = %s.id", table, m.primaryFK, m.primaryTable)) - } - - f.addWhere(fmt.Sprintf("%s.%s IS %s NULL", table, m.foreignFK, notClause)) - return - } - - if len(criterion.Value) == 0 { - return - } - - var args []interface{} - for _, tagID := range criterion.Value { - args = append(args, tagID) - } - - if m.addJoinsFunc != nil { - m.addJoinsFunc(f) - } - - whereClause, havingClause := getMultiCriterionClause(m.primaryTable, m.foreignTable, m.joinTable, m.primaryFK, m.foreignFK, criterion) - f.addWhere(whereClause, args...) 
- f.addHaving(havingClause) - } - } -} - -type countCriterionHandlerBuilder struct { - primaryTable string - joinTable string - primaryFK string -} - -func (m *countCriterionHandlerBuilder) handler(criterion *models.IntCriterionInput) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if criterion != nil { - clause, args := getCountCriterionClause(m.primaryTable, m.joinTable, m.primaryFK, *criterion) - - f.addWhere(clause, args...) - } - } -} - -// handler for StringCriterion for string list fields -type stringListCriterionHandlerBuilder struct { - // table joining primary and foreign objects - joinTable string - // string field on the join table - stringColumn string - - addJoinTable func(f *filterBuilder) -} - -func (m *stringListCriterionHandlerBuilder) handler(criterion *models.StringCriterionInput) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if criterion != nil { - m.addJoinTable(f) - - stringCriterionHandler(criterion, m.joinTable+"."+m.stringColumn)(ctx, f) - } - } -} - -func studioCriterionHandler(primaryTable string, studios *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if studios == nil { - return - } - - studiosCopy := *studios - switch studiosCopy.Modifier { - case models.CriterionModifierEquals: - studiosCopy.Modifier = models.CriterionModifierIncludesAll - case models.CriterionModifierNotEquals: - studiosCopy.Modifier = models.CriterionModifierExcludes - } - - hh := hierarchicalMultiCriterionHandlerBuilder{ - tx: dbWrapper{}, - - primaryTable: primaryTable, - foreignTable: studioTable, - foreignFK: studioIDColumn, - parentFK: "parent_id", - } - - hh.handler(&studiosCopy)(ctx, f) - } -} - -type hierarchicalMultiCriterionHandlerBuilder struct { - tx dbWrapper - - primaryTable string - foreignTable string - foreignFK string - - parentFK string - childFK string - relationsTable string -} - -func 
getHierarchicalValues(ctx context.Context, tx dbWrapper, values []string, table, relationsTable, parentFK string, childFK string, depth *int) (string, error) { - var args []interface{} - - if parentFK == "" { - parentFK = "parent_id" - } - if childFK == "" { - childFK = "child_id" - } - - depthVal := 0 - if depth != nil { - depthVal = *depth - } - - if depthVal == 0 { - valid := true - var valuesClauses []string - for _, value := range values { - id, err := strconv.Atoi(value) - // In case of invalid value just run the query. - // Building VALUES() based on provided values just saves a query when depth is 0. - if err != nil { - valid = false - break - } - - valuesClauses = append(valuesClauses, fmt.Sprintf("(%d,%d)", id, id)) - } - - if valid { - return "VALUES" + strings.Join(valuesClauses, ","), nil - } - } - - for _, value := range values { - args = append(args, value) - } - inCount := len(args) - - var depthCondition string - if depthVal != -1 { - depthCondition = fmt.Sprintf("WHERE depth < %d", depthVal) - } - - withClauseMap := utils.StrFormatMap{ - "table": table, - "relationsTable": relationsTable, - "inBinding": getInBinding(inCount), - "recursiveSelect": "", - "parentFK": parentFK, - "childFK": childFK, - "depthCondition": depthCondition, - "unionClause": "", - } - - if relationsTable != "" { - withClauseMap["recursiveSelect"] = utils.StrFormat(`SELECT p.root_id, c.{childFK}, depth + 1 FROM {relationsTable} AS c -INNER JOIN items as p ON c.{parentFK} = p.item_id -`, withClauseMap) - } else { - withClauseMap["recursiveSelect"] = utils.StrFormat(`SELECT p.root_id, c.id, depth + 1 FROM {table} as c -INNER JOIN items as p ON c.{parentFK} = p.item_id -`, withClauseMap) - } - - if depthVal != 0 { - withClauseMap["unionClause"] = utils.StrFormat(` -UNION {recursiveSelect} {depthCondition} -`, withClauseMap) - } - - withClause := utils.StrFormat(`items AS ( -SELECT id as root_id, id as item_id, 0 as depth FROM {table} -WHERE id in {inBinding} -{unionClause}) -`, 
withClauseMap) - - query := fmt.Sprintf("WITH RECURSIVE %s SELECT 'VALUES' || GROUP_CONCAT('(' || root_id || ', ' || item_id || ')') AS val FROM items", withClause) - - var valuesClause sql.NullString - err := tx.Get(ctx, &valuesClause, query, args...) - if err != nil { - return "", fmt.Errorf("failed to get hierarchical values: %w", err) - } - - // if no values are found, just return a values string with the values only - if !valuesClause.Valid { - for i, value := range values { - values[i] = fmt.Sprintf("(%s, %s)", value, value) - } - valuesClause.String = "VALUES" + strings.Join(values, ",") - } - - return valuesClause.String, nil -} - -func addHierarchicalConditionClauses(f *filterBuilder, criterion models.HierarchicalMultiCriterionInput, table, idColumn string) { - switch criterion.Modifier { - case models.CriterionModifierIncludes: - f.addWhere(fmt.Sprintf("%s.%s IS NOT NULL", table, idColumn)) - case models.CriterionModifierIncludesAll: - f.addWhere(fmt.Sprintf("%s.%s IS NOT NULL", table, idColumn)) - f.addHaving(fmt.Sprintf("count(distinct %s.%s) IS %d", table, idColumn, len(criterion.Value))) - case models.CriterionModifierExcludes: - f.addWhere(fmt.Sprintf("%s.%s IS NULL", table, idColumn)) - } -} - -func (m *hierarchicalMultiCriterionHandlerBuilder) handler(c *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if c != nil { - // make a copy so we don't modify the original - criterion := *c - - // don't support equals/not equals - if criterion.Modifier == models.CriterionModifierEquals || criterion.Modifier == models.CriterionModifierNotEquals { - f.setError(fmt.Errorf("modifier %s is not supported for hierarchical multi criterion", criterion.Modifier)) - return - } - - if criterion.Modifier == models.CriterionModifierIsNull || criterion.Modifier == models.CriterionModifierNotNull { - var notClause string - if criterion.Modifier == models.CriterionModifierNotNull { - notClause = "NOT" - 
} - - f.addWhere(utils.StrFormat("{table}.{column} IS {not} NULL", utils.StrFormatMap{ - "table": m.primaryTable, - "column": m.foreignFK, - "not": notClause, - })) - return - } - - if len(criterion.Value) == 0 && len(criterion.Excludes) == 0 { - return - } - - // combine excludes if excludes modifier is selected - if criterion.Modifier == models.CriterionModifierExcludes { - criterion.Modifier = models.CriterionModifierIncludesAll - criterion.Excludes = append(criterion.Excludes, criterion.Value...) - criterion.Value = nil - } - - if len(criterion.Value) > 0 { - valuesClause, err := getHierarchicalValues(ctx, m.tx, criterion.Value, m.foreignTable, m.relationsTable, m.parentFK, m.childFK, criterion.Depth) - if err != nil { - f.setError(err) - return - } - - switch criterion.Modifier { - case models.CriterionModifierIncludes: - f.addWhere(fmt.Sprintf("%s.%s IN (SELECT column2 FROM (%s))", m.primaryTable, m.foreignFK, valuesClause)) - case models.CriterionModifierIncludesAll: - f.addWhere(fmt.Sprintf("%s.%s IN (SELECT column2 FROM (%s))", m.primaryTable, m.foreignFK, valuesClause)) - f.addHaving(fmt.Sprintf("count(distinct %s.%s) IS %d", m.primaryTable, m.foreignFK, len(criterion.Value))) - } - } - - if len(criterion.Excludes) > 0 { - valuesClause, err := getHierarchicalValues(ctx, m.tx, criterion.Excludes, m.foreignTable, m.relationsTable, m.parentFK, m.childFK, criterion.Depth) - if err != nil { - f.setError(err) - return - } - - f.addWhere(fmt.Sprintf("%s.%s NOT IN (SELECT column2 FROM (%s)) OR %[1]s.%[2]s IS NULL", m.primaryTable, m.foreignFK, valuesClause)) - } - } - } -} - -type joinedHierarchicalMultiCriterionHandlerBuilder struct { - tx dbWrapper - - primaryTable string - primaryKey string - foreignTable string - foreignFK string - - parentFK string - childFK string - relationsTable string - - joinAs string - joinTable string - primaryFK string -} - -func (m *joinedHierarchicalMultiCriterionHandlerBuilder) addHierarchicalConditionClauses(f *filterBuilder, 
criterion models.HierarchicalMultiCriterionInput, table, idColumn string) { - primaryKey := m.primaryKey - if primaryKey == "" { - primaryKey = "id" - } - - switch criterion.Modifier { - case models.CriterionModifierEquals: - // includes only the provided ids - f.addWhere(fmt.Sprintf("%s.%s IS NOT NULL", table, idColumn)) - f.addHaving(fmt.Sprintf("count(distinct %s.%s) IS %d", table, idColumn, len(criterion.Value))) - f.addWhere(utils.StrFormat("(SELECT COUNT(*) FROM {joinTable} s WHERE s.{primaryFK} = {primaryTable}.{primaryKey}) = ?", utils.StrFormatMap{ - "joinTable": m.joinTable, - "primaryFK": m.primaryFK, - "primaryTable": m.primaryTable, - "primaryKey": primaryKey, - }), len(criterion.Value)) - case models.CriterionModifierNotEquals: - f.setError(fmt.Errorf("not equals modifier is not supported for hierarchical multi criterion input")) - default: - addHierarchicalConditionClauses(f, criterion, table, idColumn) - } -} - -func (m *joinedHierarchicalMultiCriterionHandlerBuilder) handler(c *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if c != nil { - // make a copy so we don't modify the original - criterion := *c - joinAlias := m.joinAs - primaryKey := m.primaryKey - if primaryKey == "" { - primaryKey = "id" - } - - if criterion.Modifier == models.CriterionModifierEquals && criterion.Depth != nil && *criterion.Depth != 0 { - f.setError(fmt.Errorf("depth is not supported for equals modifier in hierarchical multi criterion input")) - return - } - - if criterion.Modifier == models.CriterionModifierIsNull || criterion.Modifier == models.CriterionModifierNotNull { - var notClause string - if criterion.Modifier == models.CriterionModifierNotNull { - notClause = "NOT" - } - - f.addLeftJoin(m.joinTable, joinAlias, fmt.Sprintf("%s.%s = %s.%s", joinAlias, m.primaryFK, m.primaryTable, primaryKey)) - - f.addWhere(utils.StrFormat("{table}.{column} IS {not} NULL", utils.StrFormatMap{ - "table": 
joinAlias, - "column": m.foreignFK, - "not": notClause, - })) - return - } - - // combine excludes if excludes modifier is selected - if criterion.Modifier == models.CriterionModifierExcludes { - criterion.Modifier = models.CriterionModifierIncludesAll - criterion.Excludes = append(criterion.Excludes, criterion.Value...) - criterion.Value = nil - } - - if len(criterion.Value) == 0 && len(criterion.Excludes) == 0 { - return - } - - if len(criterion.Value) > 0 { - valuesClause, err := getHierarchicalValues(ctx, m.tx, criterion.Value, m.foreignTable, m.relationsTable, m.parentFK, m.childFK, criterion.Depth) - if err != nil { - f.setError(err) - return - } - - joinTable := utils.StrFormat(`( - SELECT j.*, d.column1 AS root_id, d.column2 AS item_id FROM {joinTable} AS j - INNER JOIN ({valuesClause}) AS d ON j.{foreignFK} = d.column2 - ) - `, utils.StrFormatMap{ - "joinTable": m.joinTable, - "foreignFK": m.foreignFK, - "valuesClause": valuesClause, - }) - - f.addLeftJoin(joinTable, joinAlias, fmt.Sprintf("%s.%s = %s.%s", joinAlias, m.primaryFK, m.primaryTable, primaryKey)) - - m.addHierarchicalConditionClauses(f, criterion, joinAlias, "root_id") - } - - if len(criterion.Excludes) > 0 { - valuesClause, err := getHierarchicalValues(ctx, m.tx, criterion.Excludes, m.foreignTable, m.relationsTable, m.parentFK, m.childFK, criterion.Depth) - if err != nil { - f.setError(err) - return - } - - joinTable := utils.StrFormat(`( - SELECT j2.*, e.column1 AS root_id, e.column2 AS item_id FROM {joinTable} AS j2 - INNER JOIN ({valuesClause}) AS e ON j2.{foreignFK} = e.column2 - ) - `, utils.StrFormatMap{ - "joinTable": m.joinTable, - "foreignFK": m.foreignFK, - "valuesClause": valuesClause, - }) - - joinAlias2 := joinAlias + "2" - - f.addLeftJoin(joinTable, joinAlias2, fmt.Sprintf("%s.%s = %s.%s", joinAlias2, m.primaryFK, m.primaryTable, primaryKey)) - - // modify for exclusion - criterionCopy := criterion - criterionCopy.Modifier = models.CriterionModifierExcludes - criterionCopy.Value 
= c.Excludes - - m.addHierarchicalConditionClauses(f, criterionCopy, joinAlias2, "root_id") - } - } - } -} - -type joinedPerformerTagsHandler struct { - criterion *models.HierarchicalMultiCriterionInput - - primaryTable string // eg scenes - joinTable string // eg performers_scenes - joinPrimaryKey string // eg scene_id -} - -func (h *joinedPerformerTagsHandler) handle(ctx context.Context, f *filterBuilder) { - tags := h.criterion - - if tags != nil { - criterion := tags.CombineExcludes() - - // validate the modifier - switch criterion.Modifier { - case models.CriterionModifierIncludesAll, models.CriterionModifierIncludes, models.CriterionModifierExcludes, models.CriterionModifierIsNull, models.CriterionModifierNotNull: - // valid - default: - f.setError(fmt.Errorf("invalid modifier %s for performer tags", criterion.Modifier)) - } - - strFormatMap := utils.StrFormatMap{ - "primaryTable": h.primaryTable, - "joinTable": h.joinTable, - "joinPrimaryKey": h.joinPrimaryKey, - "inBinding": getInBinding(len(criterion.Value)), - } - - if criterion.Modifier == models.CriterionModifierIsNull || criterion.Modifier == models.CriterionModifierNotNull { - var notClause string - if criterion.Modifier == models.CriterionModifierNotNull { - notClause = "NOT" - } - - f.addLeftJoin(h.joinTable, "", utils.StrFormat("{primaryTable}.id = {joinTable}.{joinPrimaryKey}", strFormatMap)) - f.addLeftJoin("performers_tags", "", utils.StrFormat("{joinTable}.performer_id = performers_tags.performer_id", strFormatMap)) - - f.addWhere(fmt.Sprintf("performers_tags.tag_id IS %s NULL", notClause)) - return - } - - if len(criterion.Value) == 0 && len(criterion.Excludes) == 0 { - return - } - - if len(criterion.Value) > 0 { - valuesClause, err := getHierarchicalValues(ctx, dbWrapper{}, criterion.Value, tagTable, "tags_relations", "", "", criterion.Depth) - if err != nil { - f.setError(err) - return - } - - f.addWith(utils.StrFormat(`performer_tags AS ( -SELECT ps.{joinPrimaryKey} as primaryID, t.column1 
AS root_tag_id FROM {joinTable} ps -INNER JOIN performers_tags pt ON pt.performer_id = ps.performer_id -INNER JOIN (`+valuesClause+`) t ON t.column2 = pt.tag_id -)`, strFormatMap)) - - f.addLeftJoin("performer_tags", "", utils.StrFormat("performer_tags.primaryID = {primaryTable}.id", strFormatMap)) - - addHierarchicalConditionClauses(f, criterion, "performer_tags", "root_tag_id") - } - - if len(criterion.Excludes) > 0 { - valuesClause, err := getHierarchicalValues(ctx, dbWrapper{}, criterion.Excludes, tagTable, "tags_relations", "", "", criterion.Depth) - if err != nil { - f.setError(err) - return - } - - clause := utils.StrFormat("{primaryTable}.id NOT IN (SELECT {joinTable}.{joinPrimaryKey} FROM {joinTable} INNER JOIN performers_tags ON {joinTable}.performer_id = performers_tags.performer_id WHERE performers_tags.tag_id IN (SELECT column2 FROM (%s)))", strFormatMap) - f.addWhere(fmt.Sprintf(clause, valuesClause)) - } - } -} - -type stashIDCriterionHandler struct { - c *models.StashIDCriterionInput - stashIDRepository *stashIDRepository - stashIDTableAs string - parentIDCol string -} - -func (h *stashIDCriterionHandler) handle(ctx context.Context, f *filterBuilder) { - if h.c == nil { - return - } - - stashIDRepo := h.stashIDRepository - t := stashIDRepo.tableName - if h.stashIDTableAs != "" { - t = h.stashIDTableAs - } - - joinClause := fmt.Sprintf("%s.%s = %s", t, stashIDRepo.idColumn, h.parentIDCol) - if h.c.Endpoint != nil && *h.c.Endpoint != "" { - joinClause += fmt.Sprintf(" AND %s.endpoint = '%s'", t, *h.c.Endpoint) - } - - f.addLeftJoin(stashIDRepo.tableName, h.stashIDTableAs, joinClause) - - v := "" - if h.c.StashID != nil { - v = *h.c.StashID - } - - stringCriterionHandler(&models.StringCriterionInput{ - Value: v, - Modifier: h.c.Modifier, - }, t+".stash_id")(ctx, f) -} diff --git a/pkg/sqlite/filter_hierarchical.go b/pkg/sqlite/filter_hierarchical.go new file mode 100644 index 00000000000..bc5ff9032b3 --- /dev/null +++ 
b/pkg/sqlite/filter_hierarchical.go @@ -0,0 +1,222 @@ +package sqlite + +import ( + "context" + "fmt" + + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/utils" +) + +// hierarchicalRelationshipHandler provides handlers for parent, children, parent count, and child count criteria. +type hierarchicalRelationshipHandler struct { + primaryTable string + relationTable string + aliasPrefix string + parentIDCol string + childIDCol string +} + +func (h hierarchicalRelationshipHandler) validateModifier(m models.CriterionModifier) error { + switch m { + case models.CriterionModifierIncludesAll, models.CriterionModifierIncludes, models.CriterionModifierExcludes, models.CriterionModifierIsNull, models.CriterionModifierNotNull: + // valid + return nil + default: + return fmt.Errorf("invalid modifier %s", m) + } +} + +func (h hierarchicalRelationshipHandler) handleNullNotNull(f *filterBuilder, m models.CriterionModifier, isParents bool) { + var notClause string + if m == models.CriterionModifierNotNull { + notClause = "NOT" + } + + as := h.aliasPrefix + "_parents" + col := h.childIDCol + if !isParents { + as = h.aliasPrefix + "_children" + col = h.parentIDCol + } + + // Based on: + // f.addLeftJoin("tags_relations", "parent_relations", "tags.id = parent_relations.child_id") + // f.addWhere(fmt.Sprintf("parent_relations.parent_id IS %s NULL", notClause)) + + f.addLeftJoin(h.relationTable, as, fmt.Sprintf("%s.id = %s.%s", h.primaryTable, as, col)) + f.addWhere(fmt.Sprintf("%s.%s IS %s NULL", as, col, notClause)) +} + +func (h hierarchicalRelationshipHandler) parentsAlias() string { + return h.aliasPrefix + "_parents" +} + +func (h hierarchicalRelationshipHandler) childrenAlias() string { + return h.aliasPrefix + "_children" +} + +func (h hierarchicalRelationshipHandler) valueQuery(value []string, depth int, alias string, isParents bool) string { + var depthCondition string + if depth != -1 { + depthCondition = fmt.Sprintf("WHERE depth < %d", depth) + } + + 
queryTempl := `{alias} AS ( +SELECT {root_id_col} AS root_id, {item_id_col} AS item_id, 0 AS depth FROM {relation_table} WHERE {root_id_col} IN` + getInBinding(len(value)) + ` +UNION +SELECT root_id, {item_id_col}, depth + 1 FROM {relation_table} INNER JOIN {alias} ON item_id = {root_id_col} ` + depthCondition + ` +)` + + var queryMap utils.StrFormatMap + if isParents { + queryMap = utils.StrFormatMap{ + "root_id_col": h.parentIDCol, + "item_id_col": h.childIDCol, + } + } else { + queryMap = utils.StrFormatMap{ + "root_id_col": h.childIDCol, + "item_id_col": h.parentIDCol, + } + } + + queryMap["alias"] = alias + queryMap["relation_table"] = h.relationTable + + return utils.StrFormat(queryTempl, queryMap) +} + +func (h hierarchicalRelationshipHandler) handleValues(f *filterBuilder, c models.HierarchicalMultiCriterionInput, isParents bool, aliasSuffix string) { + if len(c.Value) == 0 { + return + } + + var args []interface{} + for _, val := range c.Value { + args = append(args, val) + } + + depthVal := 0 + if c.Depth != nil { + depthVal = *c.Depth + } + + tableAlias := h.parentsAlias() + if !isParents { + tableAlias = h.childrenAlias() + } + tableAlias += aliasSuffix + + query := h.valueQuery(c.Value, depthVal, tableAlias, isParents) + f.addRecursiveWith(query, args...) 
+ + f.addLeftJoin(tableAlias, "", fmt.Sprintf("%s.item_id = %s.id", tableAlias, h.primaryTable)) + addHierarchicalConditionClauses(f, c, tableAlias, "root_id") +} + +func (h hierarchicalRelationshipHandler) handleValuesSimple(f *filterBuilder, value string, isParents bool) { + joinCol := h.childIDCol + valueCol := h.parentIDCol + if !isParents { + joinCol = h.parentIDCol + valueCol = h.childIDCol + } + + tableAlias := h.parentsAlias() + if !isParents { + tableAlias = h.childrenAlias() + } + + f.addInnerJoin(h.relationTable, tableAlias, fmt.Sprintf("%s.%s = %s.id", tableAlias, joinCol, h.primaryTable)) + f.addWhere(fmt.Sprintf("%s.%s = ?", tableAlias, valueCol), value) +} + +func (h hierarchicalRelationshipHandler) hierarchicalCriterionHandler(criterion *models.HierarchicalMultiCriterionInput, isParents bool) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if criterion != nil { + c := criterion.CombineExcludes() + + // validate the modifier + if err := h.validateModifier(c.Modifier); err != nil { + f.setError(err) + return + } + + if c.Modifier == models.CriterionModifierIsNull || c.Modifier == models.CriterionModifierNotNull { + h.handleNullNotNull(f, c.Modifier, isParents) + return + } + + if len(c.Value) == 0 && len(c.Excludes) == 0 { + return + } + + depth := 0 + if c.Depth != nil { + depth = *c.Depth + } + + // if we have a single include, no excludes, and no depth, we can use a simple join and where clause + if (c.Modifier == models.CriterionModifierIncludes || c.Modifier == models.CriterionModifierIncludesAll) && len(c.Value) == 1 && len(c.Excludes) == 0 && depth == 0 { + h.handleValuesSimple(f, c.Value[0], isParents) + return + } + + aliasSuffix := "" + h.handleValues(f, c, isParents, aliasSuffix) + + if len(c.Excludes) > 0 { + exCriterion := models.HierarchicalMultiCriterionInput{ + Value: c.Excludes, + Depth: c.Depth, + Modifier: models.CriterionModifierExcludes, + } + + aliasSuffix := "2" + h.handleValues(f, exCriterion, 
isParents, aliasSuffix) + } + } + } +} + +func (h hierarchicalRelationshipHandler) ParentsCriterionHandler(criterion *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { + const isParents = true + return h.hierarchicalCriterionHandler(criterion, isParents) +} + +func (h hierarchicalRelationshipHandler) ChildrenCriterionHandler(criterion *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { + const isParents = false + return h.hierarchicalCriterionHandler(criterion, isParents) +} + +func (h hierarchicalRelationshipHandler) countCriterionHandler(c *models.IntCriterionInput, isParents bool) criterionHandlerFunc { + tableAlias := h.parentsAlias() + col := h.childIDCol + otherCol := h.parentIDCol + if !isParents { + tableAlias = h.childrenAlias() + col = h.parentIDCol + otherCol = h.childIDCol + } + tableAlias += "_count" + + return func(ctx context.Context, f *filterBuilder) { + if c != nil { + f.addLeftJoin(h.relationTable, tableAlias, fmt.Sprintf("%s.%s = %s.id", tableAlias, col, h.primaryTable)) + clause, args := getIntCriterionWhereClause(fmt.Sprintf("count(distinct %s.%s)", tableAlias, otherCol), *c) + + f.addHaving(clause, args...) 
+ } + } +} + +func (h hierarchicalRelationshipHandler) ParentCountCriterionHandler(parentCount *models.IntCriterionInput) criterionHandlerFunc { + const isParents = true + return h.countCriterionHandler(parentCount, isParents) +} + +func (h hierarchicalRelationshipHandler) ChildCountCriterionHandler(childCount *models.IntCriterionInput) criterionHandlerFunc { + const isParents = false + return h.countCriterionHandler(childCount, isParents) +} diff --git a/pkg/sqlite/gallery.go b/pkg/sqlite/gallery.go index 7ddb514d054..5473b9c36ee 100644 --- a/pkg/sqlite/gallery.go +++ b/pkg/sqlite/gallery.go @@ -6,7 +6,6 @@ import ( "errors" "fmt" "path/filepath" - "regexp" "github.com/doug-martin/goqu/v9" "github.com/doug-martin/goqu/v9/exp" @@ -113,9 +112,75 @@ func (r *galleryRowRecord) fromPartial(o models.GalleryPartial) { r.setTimestamp("updated_at", o.UpdatedAt) } -type GalleryStore struct { +type galleryRepositoryType struct { repository + performers joinRepository + images joinRepository + tags joinRepository + scenes joinRepository + files filesRepository +} + +func (r *galleryRepositoryType) addGalleriesFilesTable(f *filterBuilder) { + f.addLeftJoin(galleriesFilesTable, "", "galleries_files.gallery_id = galleries.id") +} + +func (r *galleryRepositoryType) addFilesTable(f *filterBuilder) { + r.addGalleriesFilesTable(f) + f.addLeftJoin(fileTable, "", "galleries_files.file_id = files.id") +} + +func (r *galleryRepositoryType) addFoldersTable(f *filterBuilder) { + r.addFilesTable(f) + f.addLeftJoin(folderTable, "", "files.parent_folder_id = folders.id") +} + +var ( + galleryRepository = galleryRepositoryType{ + repository: repository{ + tableName: galleryTable, + idColumn: idColumn, + }, + performers: joinRepository{ + repository: repository{ + tableName: performersGalleriesTable, + idColumn: galleryIDColumn, + }, + fkColumn: "performer_id", + }, + tags: joinRepository{ + repository: repository{ + tableName: galleriesTagsTable, + idColumn: galleryIDColumn, + }, + fkColumn: 
"tag_id", + foreignTable: tagTable, + orderBy: "tags.name ASC", + }, + images: joinRepository{ + repository: repository{ + tableName: galleriesImagesTable, + idColumn: galleryIDColumn, + }, + fkColumn: "image_id", + }, + scenes: joinRepository{ + repository: repository{ + tableName: galleriesScenesTable, + idColumn: galleryIDColumn, + }, + fkColumn: sceneIDColumn, + }, + files: filesRepository{ + repository: repository{ + tableName: galleriesFilesTable, + idColumn: galleryIDColumn, + }, + }, + } +) +type GalleryStore struct { tableMgr *table fileStore *FileStore @@ -124,10 +189,6 @@ type GalleryStore struct { func NewGalleryStore(fileStore *FileStore, folderStore *FolderStore) *GalleryStore { return &GalleryStore{ - repository: repository{ - tableName: galleryTable, - idColumn: idColumn, - }, tableMgr: galleryTableMgr, fileStore: fileStore, folderStore: folderStore, @@ -309,7 +370,7 @@ func (qb *GalleryStore) Destroy(ctx context.Context, id int) error { } func (qb *GalleryStore) GetFiles(ctx context.Context, id int) ([]models.File, error) { - fileIDs, err := qb.filesRepository().get(ctx, id) + fileIDs, err := galleryRepository.files.get(ctx, id) if err != nil { return nil, err } @@ -328,7 +389,7 @@ func (qb *GalleryStore) GetFiles(ctx context.Context, id int) ([]models.File, er func (qb *GalleryStore) GetManyFileIDs(ctx context.Context, ids []int) ([][]models.FileID, error) { const primaryOnly = false - return qb.filesRepository().getMany(ctx, ids, primaryOnly) + return galleryRepository.files.getMany(ctx, ids, primaryOnly) } // returns nil, nil if not found @@ -617,116 +678,6 @@ func (qb *GalleryStore) All(ctx context.Context) ([]*models.Gallery, error) { return qb.getMany(ctx, qb.selectDataset()) } -func (qb *GalleryStore) validateFilter(galleryFilter *models.GalleryFilterType) error { - const and = "AND" - const or = "OR" - const not = "NOT" - - if galleryFilter.And != nil { - if galleryFilter.Or != nil { - return illegalFilterCombination(and, or) - } - if 
galleryFilter.Not != nil { - return illegalFilterCombination(and, not) - } - - return qb.validateFilter(galleryFilter.And) - } - - if galleryFilter.Or != nil { - if galleryFilter.Not != nil { - return illegalFilterCombination(or, not) - } - - return qb.validateFilter(galleryFilter.Or) - } - - if galleryFilter.Not != nil { - return qb.validateFilter(galleryFilter.Not) - } - - return nil -} - -func (qb *GalleryStore) makeFilter(ctx context.Context, galleryFilter *models.GalleryFilterType) *filterBuilder { - query := &filterBuilder{} - - if galleryFilter.And != nil { - query.and(qb.makeFilter(ctx, galleryFilter.And)) - } - if galleryFilter.Or != nil { - query.or(qb.makeFilter(ctx, galleryFilter.Or)) - } - if galleryFilter.Not != nil { - query.not(qb.makeFilter(ctx, galleryFilter.Not)) - } - - query.handleCriterion(ctx, intCriterionHandler(galleryFilter.ID, "galleries.id", nil)) - query.handleCriterion(ctx, stringCriterionHandler(galleryFilter.Title, "galleries.title")) - query.handleCriterion(ctx, stringCriterionHandler(galleryFilter.Code, "galleries.code")) - query.handleCriterion(ctx, stringCriterionHandler(galleryFilter.Details, "galleries.details")) - query.handleCriterion(ctx, stringCriterionHandler(galleryFilter.Photographer, "galleries.photographer")) - - query.handleCriterion(ctx, criterionHandlerFunc(func(ctx context.Context, f *filterBuilder) { - if galleryFilter.Checksum != nil { - qb.addGalleriesFilesTable(f) - f.addLeftJoin(fingerprintTable, "fingerprints_md5", "galleries_files.file_id = fingerprints_md5.file_id AND fingerprints_md5.type = 'md5'") - } - - stringCriterionHandler(galleryFilter.Checksum, "fingerprints_md5.fingerprint")(ctx, f) - })) - - query.handleCriterion(ctx, criterionHandlerFunc(func(ctx context.Context, f *filterBuilder) { - if galleryFilter.IsZip != nil { - qb.addGalleriesFilesTable(f) - if *galleryFilter.IsZip { - - f.addWhere("galleries_files.file_id IS NOT NULL") - } else { - f.addWhere("galleries_files.file_id IS NULL") - } - } - 
})) - - query.handleCriterion(ctx, qb.galleryPathCriterionHandler(galleryFilter.Path)) - query.handleCriterion(ctx, galleryFileCountCriterionHandler(qb, galleryFilter.FileCount)) - query.handleCriterion(ctx, intCriterionHandler(galleryFilter.Rating100, "galleries.rating", nil)) - query.handleCriterion(ctx, galleryURLsCriterionHandler(galleryFilter.URL)) - query.handleCriterion(ctx, boolCriterionHandler(galleryFilter.Organized, "galleries.organized", nil)) - query.handleCriterion(ctx, galleryIsMissingCriterionHandler(qb, galleryFilter.IsMissing)) - query.handleCriterion(ctx, galleryTagsCriterionHandler(qb, galleryFilter.Tags)) - query.handleCriterion(ctx, galleryTagCountCriterionHandler(qb, galleryFilter.TagCount)) - query.handleCriterion(ctx, galleryPerformersCriterionHandler(qb, galleryFilter.Performers)) - query.handleCriterion(ctx, galleryPerformerCountCriterionHandler(qb, galleryFilter.PerformerCount)) - query.handleCriterion(ctx, hasChaptersCriterionHandler(galleryFilter.HasChapters)) - query.handleCriterion(ctx, galleryScenesCriterionHandler(qb, galleryFilter.Scenes)) - query.handleCriterion(ctx, studioCriterionHandler(galleryTable, galleryFilter.Studios)) - query.handleCriterion(ctx, galleryPerformerTagsCriterionHandler(qb, galleryFilter.PerformerTags)) - query.handleCriterion(ctx, galleryAverageResolutionCriterionHandler(qb, galleryFilter.AverageResolution)) - query.handleCriterion(ctx, galleryImageCountCriterionHandler(qb, galleryFilter.ImageCount)) - query.handleCriterion(ctx, galleryPerformerFavoriteCriterionHandler(galleryFilter.PerformerFavorite)) - query.handleCriterion(ctx, galleryPerformerAgeCriterionHandler(galleryFilter.PerformerAge)) - query.handleCriterion(ctx, dateCriterionHandler(galleryFilter.Date, "galleries.date")) - query.handleCriterion(ctx, timestampCriterionHandler(galleryFilter.CreatedAt, "galleries.created_at")) - query.handleCriterion(ctx, timestampCriterionHandler(galleryFilter.UpdatedAt, "galleries.updated_at")) - - return query -} 
- -func (qb *GalleryStore) addGalleriesFilesTable(f *filterBuilder) { - f.addLeftJoin(galleriesFilesTable, "", "galleries_files.gallery_id = galleries.id") -} - -func (qb *GalleryStore) addFilesTable(f *filterBuilder) { - qb.addGalleriesFilesTable(f) - f.addLeftJoin(fileTable, "", "galleries_files.file_id = files.id") -} - -func (qb *GalleryStore) addFoldersTable(f *filterBuilder) { - qb.addFilesTable(f) - f.addLeftJoin(folderTable, "", "files.parent_folder_id = folders.id") -} - func (qb *GalleryStore) makeQuery(ctx context.Context, galleryFilter *models.GalleryFilterType, findFilter *models.FindFilterType) (*queryBuilder, error) { if galleryFilter == nil { galleryFilter = &models.GalleryFilterType{} @@ -735,7 +686,7 @@ func (qb *GalleryStore) makeQuery(ctx context.Context, galleryFilter *models.Gal findFilter = &models.FindFilterType{} } - query := qb.newQuery() + query := galleryRepository.newQuery() distinctIDs(&query, galleryTable) if q := findFilter.Q; q != nil && *q != "" { @@ -773,10 +724,9 @@ func (qb *GalleryStore) makeQuery(ctx context.Context, galleryFilter *models.Gal query.parseQueryString(searchColumns, *q) } - if err := qb.validateFilter(galleryFilter); err != nil { - return nil, err - } - filter := qb.makeFilter(ctx, galleryFilter) + filter := filterBuilderFromHandler(ctx, &galleryFilterHandler{ + galleryFilter: galleryFilter, + }) if err := query.addFilter(filter); err != nil { return nil, err @@ -818,290 +768,6 @@ func (qb *GalleryStore) QueryCount(ctx context.Context, galleryFilter *models.Ga return query.executeCount(ctx) } -func galleryURLsCriterionHandler(url *models.StringCriterionInput) criterionHandlerFunc { - h := stringListCriterionHandlerBuilder{ - joinTable: galleriesURLsTable, - stringColumn: galleriesURLColumn, - addJoinTable: func(f *filterBuilder) { - galleriesURLsTableMgr.join(f, "", "galleries.id") - }, - } - - return h.handler(url) -} - -func (qb *GalleryStore) getMultiCriterionHandlerBuilder(foreignTable, joinTable, foreignFK 
string, addJoinsFunc func(f *filterBuilder)) multiCriterionHandlerBuilder { - return multiCriterionHandlerBuilder{ - primaryTable: galleryTable, - foreignTable: foreignTable, - joinTable: joinTable, - primaryFK: galleryIDColumn, - foreignFK: foreignFK, - addJoinsFunc: addJoinsFunc, - } -} - -func (qb *GalleryStore) galleryPathCriterionHandler(c *models.StringCriterionInput) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if c != nil { - qb.addFoldersTable(f) - f.addLeftJoin(folderTable, "gallery_folder", "galleries.folder_id = gallery_folder.id") - - const pathColumn = "folders.path" - const basenameColumn = "files.basename" - const folderPathColumn = "gallery_folder.path" - - addWildcards := true - not := false - - if modifier := c.Modifier; c.Modifier.IsValid() { - switch modifier { - case models.CriterionModifierIncludes: - clause := getPathSearchClauseMany(pathColumn, basenameColumn, c.Value, addWildcards, not) - clause2 := getStringSearchClause([]string{folderPathColumn}, c.Value, false) - f.whereClauses = append(f.whereClauses, orClauses(clause, clause2)) - case models.CriterionModifierExcludes: - not = true - clause := getPathSearchClauseMany(pathColumn, basenameColumn, c.Value, addWildcards, not) - clause2 := getStringSearchClause([]string{folderPathColumn}, c.Value, true) - f.whereClauses = append(f.whereClauses, orClauses(clause, clause2)) - case models.CriterionModifierEquals: - addWildcards = false - clause := getPathSearchClause(pathColumn, basenameColumn, c.Value, addWildcards, not) - clause2 := makeClause(folderPathColumn+" LIKE ?", c.Value) - f.whereClauses = append(f.whereClauses, orClauses(clause, clause2)) - case models.CriterionModifierNotEquals: - addWildcards = false - not = true - clause := getPathSearchClause(pathColumn, basenameColumn, c.Value, addWildcards, not) - clause2 := makeClause(folderPathColumn+" NOT LIKE ?", c.Value) - f.whereClauses = append(f.whereClauses, orClauses(clause, clause2)) - case 
models.CriterionModifierMatchesRegex: - if _, err := regexp.Compile(c.Value); err != nil { - f.setError(err) - return - } - filepathColumn := fmt.Sprintf("%s || '%s' || %s", pathColumn, string(filepath.Separator), basenameColumn) - clause := makeClause(fmt.Sprintf("%s IS NOT NULL AND %s IS NOT NULL AND %s regexp ?", pathColumn, basenameColumn, filepathColumn), c.Value) - clause2 := makeClause(fmt.Sprintf("%s IS NOT NULL AND %[1]s regexp ?", folderPathColumn), c.Value) - f.whereClauses = append(f.whereClauses, orClauses(clause, clause2)) - case models.CriterionModifierNotMatchesRegex: - if _, err := regexp.Compile(c.Value); err != nil { - f.setError(err) - return - } - filepathColumn := fmt.Sprintf("%s || '%s' || %s", pathColumn, string(filepath.Separator), basenameColumn) - f.addWhere(fmt.Sprintf("%s IS NULL OR %s IS NULL OR %s NOT regexp ?", pathColumn, basenameColumn, filepathColumn), c.Value) - f.addWhere(fmt.Sprintf("%s IS NULL OR %[1]s NOT regexp ?", folderPathColumn), c.Value) - case models.CriterionModifierIsNull: - f.addWhere(fmt.Sprintf("%s IS NULL OR TRIM(%[1]s) = '' OR %s IS NULL OR TRIM(%[2]s) = ''", pathColumn, basenameColumn)) - f.addWhere(fmt.Sprintf("%s IS NULL OR TRIM(%[1]s) = ''", folderPathColumn)) - case models.CriterionModifierNotNull: - clause := makeClause(fmt.Sprintf("%s IS NOT NULL AND TRIM(%[1]s) != '' AND %s IS NOT NULL AND TRIM(%[2]s) != ''", pathColumn, basenameColumn)) - clause2 := makeClause(fmt.Sprintf("%s IS NOT NULL AND TRIM(%[1]s) != ''", folderPathColumn)) - f.whereClauses = append(f.whereClauses, orClauses(clause, clause2)) - default: - panic("unsupported string filter modifier") - } - } - } - } -} - -func galleryFileCountCriterionHandler(qb *GalleryStore, fileCount *models.IntCriterionInput) criterionHandlerFunc { - h := countCriterionHandlerBuilder{ - primaryTable: galleryTable, - joinTable: galleriesFilesTable, - primaryFK: galleryIDColumn, - } - - return h.handler(fileCount) -} - -func galleryIsMissingCriterionHandler(qb 
*GalleryStore, isMissing *string) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if isMissing != nil && *isMissing != "" { - switch *isMissing { - case "url": - galleriesURLsTableMgr.join(f, "", "galleries.id") - f.addWhere("gallery_urls.url IS NULL") - case "scenes": - f.addLeftJoin("scenes_galleries", "scenes_join", "scenes_join.gallery_id = galleries.id") - f.addWhere("scenes_join.gallery_id IS NULL") - case "studio": - f.addWhere("galleries.studio_id IS NULL") - case "performers": - qb.performersRepository().join(f, "performers_join", "galleries.id") - f.addWhere("performers_join.gallery_id IS NULL") - case "date": - f.addWhere("galleries.date IS NULL OR galleries.date IS \"\"") - case "tags": - qb.tagsRepository().join(f, "tags_join", "galleries.id") - f.addWhere("tags_join.gallery_id IS NULL") - default: - f.addWhere("(galleries." + *isMissing + " IS NULL OR TRIM(galleries." + *isMissing + ") = '')") - } - } - } -} - -func galleryTagsCriterionHandler(qb *GalleryStore, tags *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { - h := joinedHierarchicalMultiCriterionHandlerBuilder{ - tx: qb.tx, - - primaryTable: galleryTable, - foreignTable: tagTable, - foreignFK: "tag_id", - - relationsTable: "tags_relations", - joinAs: "image_tag", - joinTable: galleriesTagsTable, - primaryFK: galleryIDColumn, - } - - return h.handler(tags) -} - -func galleryTagCountCriterionHandler(qb *GalleryStore, tagCount *models.IntCriterionInput) criterionHandlerFunc { - h := countCriterionHandlerBuilder{ - primaryTable: galleryTable, - joinTable: galleriesTagsTable, - primaryFK: galleryIDColumn, - } - - return h.handler(tagCount) -} - -func galleryScenesCriterionHandler(qb *GalleryStore, scenes *models.MultiCriterionInput) criterionHandlerFunc { - addJoinsFunc := func(f *filterBuilder) { - qb.scenesRepository().join(f, "", "galleries.id") - f.addLeftJoin("scenes", "", "scenes_galleries.scene_id = scenes.id") - } - h := 
qb.getMultiCriterionHandlerBuilder(sceneTable, galleriesScenesTable, "scene_id", addJoinsFunc) - return h.handler(scenes) -} - -func galleryPerformersCriterionHandler(qb *GalleryStore, performers *models.MultiCriterionInput) criterionHandlerFunc { - h := joinedMultiCriterionHandlerBuilder{ - primaryTable: galleryTable, - joinTable: performersGalleriesTable, - joinAs: "performers_join", - primaryFK: galleryIDColumn, - foreignFK: performerIDColumn, - - addJoinTable: func(f *filterBuilder) { - qb.performersRepository().join(f, "performers_join", "galleries.id") - }, - } - - return h.handler(performers) -} - -func galleryPerformerCountCriterionHandler(qb *GalleryStore, performerCount *models.IntCriterionInput) criterionHandlerFunc { - h := countCriterionHandlerBuilder{ - primaryTable: galleryTable, - joinTable: performersGalleriesTable, - primaryFK: galleryIDColumn, - } - - return h.handler(performerCount) -} - -func galleryImageCountCriterionHandler(qb *GalleryStore, imageCount *models.IntCriterionInput) criterionHandlerFunc { - h := countCriterionHandlerBuilder{ - primaryTable: galleryTable, - joinTable: galleriesImagesTable, - primaryFK: galleryIDColumn, - } - - return h.handler(imageCount) -} - -func hasChaptersCriterionHandler(hasChapters *string) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if hasChapters != nil { - f.addLeftJoin("galleries_chapters", "", "galleries_chapters.gallery_id = galleries.id") - if *hasChapters == "true" { - f.addHaving("count(galleries_chapters.gallery_id) > 0") - } else { - f.addWhere("galleries_chapters.id IS NULL") - } - } - } -} - -func galleryPerformerTagsCriterionHandler(qb *GalleryStore, tags *models.HierarchicalMultiCriterionInput) criterionHandler { - return &joinedPerformerTagsHandler{ - criterion: tags, - primaryTable: galleryTable, - joinTable: performersGalleriesTable, - joinPrimaryKey: galleryIDColumn, - } -} - -func galleryPerformerFavoriteCriterionHandler(performerfavorite *bool) 
criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if performerfavorite != nil { - f.addLeftJoin("performers_galleries", "", "galleries.id = performers_galleries.gallery_id") - - if *performerfavorite { - // contains at least one favorite - f.addLeftJoin("performers", "", "performers.id = performers_galleries.performer_id") - f.addWhere("performers.favorite = 1") - } else { - // contains zero favorites - f.addLeftJoin(`(SELECT performers_galleries.gallery_id as id FROM performers_galleries -JOIN performers ON performers.id = performers_galleries.performer_id -GROUP BY performers_galleries.gallery_id HAVING SUM(performers.favorite) = 0)`, "nofaves", "galleries.id = nofaves.id") - f.addWhere("performers_galleries.gallery_id IS NULL OR nofaves.id IS NOT NULL") - } - } - } -} - -func galleryPerformerAgeCriterionHandler(performerAge *models.IntCriterionInput) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if performerAge != nil { - f.addInnerJoin("performers_galleries", "", "galleries.id = performers_galleries.gallery_id") - f.addInnerJoin("performers", "", "performers_galleries.performer_id = performers.id") - - f.addWhere("galleries.date != '' AND performers.birthdate != ''") - f.addWhere("galleries.date IS NOT NULL AND performers.birthdate IS NOT NULL") - - ageCalc := "cast(strftime('%Y.%m%d', galleries.date) - strftime('%Y.%m%d', performers.birthdate) as int)" - whereClause, args := getIntWhereClause(ageCalc, performerAge.Modifier, performerAge.Value, performerAge.Value2) - f.addWhere(whereClause, args...) 
- } - } -} - -func galleryAverageResolutionCriterionHandler(qb *GalleryStore, resolution *models.ResolutionCriterionInput) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if resolution != nil && resolution.Value.IsValid() { - qb.imagesRepository().join(f, "images_join", "galleries.id") - f.addLeftJoin("images", "", "images_join.image_id = images.id") - f.addLeftJoin("images_files", "", "images.id = images_files.image_id") - f.addLeftJoin("image_files", "", "images_files.file_id = image_files.file_id") - - min := resolution.Value.GetMinResolution() - max := resolution.Value.GetMaxResolution() - - const widthHeight = "avg(MIN(image_files.width, image_files.height))" - - switch resolution.Modifier { - case models.CriterionModifierEquals: - f.addHaving(fmt.Sprintf("%s BETWEEN %d AND %d", widthHeight, min, max)) - case models.CriterionModifierNotEquals: - f.addHaving(fmt.Sprintf("%s NOT BETWEEN %d AND %d", widthHeight, min, max)) - case models.CriterionModifierLessThan: - f.addHaving(fmt.Sprintf("%s < %d", widthHeight, min)) - case models.CriterionModifierGreaterThan: - f.addHaving(fmt.Sprintf("%s > %d", widthHeight, max)) - } - } - } -} - var gallerySortOptions = sortOptions{ "created_at", "date", @@ -1194,92 +860,44 @@ func (qb *GalleryStore) GetURLs(ctx context.Context, galleryID int) ([]string, e return galleriesURLsTableMgr.get(ctx, galleryID) } -func (qb *GalleryStore) filesRepository() *filesRepository { - return &filesRepository{ - repository: repository{ - tx: qb.tx, - tableName: galleriesFilesTable, - idColumn: galleryIDColumn, - }, - } -} - func (qb *GalleryStore) AddFileID(ctx context.Context, id int, fileID models.FileID) error { const firstPrimary = false return galleriesFilesTableMgr.insertJoins(ctx, id, firstPrimary, []models.FileID{fileID}) } -func (qb *GalleryStore) performersRepository() *joinRepository { - return &joinRepository{ - repository: repository{ - tx: qb.tx, - tableName: performersGalleriesTable, - idColumn: 
galleryIDColumn, - }, - fkColumn: "performer_id", - } -} - func (qb *GalleryStore) GetPerformerIDs(ctx context.Context, id int) ([]int, error) { - return qb.performersRepository().getIDs(ctx, id) -} - -func (qb *GalleryStore) tagsRepository() *joinRepository { - return &joinRepository{ - repository: repository{ - tx: qb.tx, - tableName: galleriesTagsTable, - idColumn: galleryIDColumn, - }, - fkColumn: "tag_id", - foreignTable: tagTable, - orderBy: "tags.name ASC", - } + return galleryRepository.performers.getIDs(ctx, id) } func (qb *GalleryStore) GetTagIDs(ctx context.Context, id int) ([]int, error) { - return qb.tagsRepository().getIDs(ctx, id) -} - -func (qb *GalleryStore) imagesRepository() *joinRepository { - return &joinRepository{ - repository: repository{ - tx: qb.tx, - tableName: galleriesImagesTable, - idColumn: galleryIDColumn, - }, - fkColumn: "image_id", - } + return galleryRepository.tags.getIDs(ctx, id) } func (qb *GalleryStore) GetImageIDs(ctx context.Context, galleryID int) ([]int, error) { - return qb.imagesRepository().getIDs(ctx, galleryID) + return galleryRepository.images.getIDs(ctx, galleryID) } func (qb *GalleryStore) AddImages(ctx context.Context, galleryID int, imageIDs ...int) error { - return qb.imagesRepository().insertOrIgnore(ctx, galleryID, imageIDs...) + return galleryRepository.images.insertOrIgnore(ctx, galleryID, imageIDs...) } func (qb *GalleryStore) RemoveImages(ctx context.Context, galleryID int, imageIDs ...int) error { - return qb.imagesRepository().destroyJoins(ctx, galleryID, imageIDs...) + return galleryRepository.images.destroyJoins(ctx, galleryID, imageIDs...) 
} func (qb *GalleryStore) UpdateImages(ctx context.Context, galleryID int, imageIDs []int) error { // Delete the existing joins and then create new ones - return qb.imagesRepository().replace(ctx, galleryID, imageIDs) + return galleryRepository.images.replace(ctx, galleryID, imageIDs) } -func (qb *GalleryStore) scenesRepository() *joinRepository { - return &joinRepository{ - repository: repository{ - tx: qb.tx, - tableName: galleriesScenesTable, - idColumn: galleryIDColumn, - }, - fkColumn: sceneIDColumn, - } +func (qb *GalleryStore) SetCover(ctx context.Context, galleryID int, coverImageID int) error { + return imageGalleriesTableMgr.setCover(ctx, coverImageID, galleryID) +} + +func (qb *GalleryStore) ResetCover(ctx context.Context, galleryID int) error { + return imageGalleriesTableMgr.resetCover(ctx, galleryID) } func (qb *GalleryStore) GetSceneIDs(ctx context.Context, id int) ([]int, error) { - return qb.scenesRepository().getIDs(ctx, id) + return galleryRepository.scenes.getIDs(ctx, id) } diff --git a/pkg/sqlite/gallery_filter.go b/pkg/sqlite/gallery_filter.go new file mode 100644 index 00000000000..ad5ac592ada --- /dev/null +++ b/pkg/sqlite/gallery_filter.go @@ -0,0 +1,434 @@ +package sqlite + +import ( + "context" + "fmt" + "path/filepath" + "regexp" + + "github.com/stashapp/stash/pkg/models" +) + +type galleryFilterHandler struct { + galleryFilter *models.GalleryFilterType +} + +func (qb *galleryFilterHandler) validate() error { + galleryFilter := qb.galleryFilter + if galleryFilter == nil { + return nil + } + + if err := validateFilterCombination(galleryFilter.OperatorFilter); err != nil { + return err + } + + if subFilter := galleryFilter.SubFilter(); subFilter != nil { + sqb := &galleryFilterHandler{galleryFilter: subFilter} + if err := sqb.validate(); err != nil { + return err + } + } + + return nil +} + +func (qb *galleryFilterHandler) handle(ctx context.Context, f *filterBuilder) { + galleryFilter := qb.galleryFilter + if galleryFilter == nil { + 
return + } + + if err := qb.validate(); err != nil { + f.setError(err) + return + } + + sf := galleryFilter.SubFilter() + if sf != nil { + sub := &galleryFilterHandler{sf} + handleSubFilter(ctx, sub, f, galleryFilter.OperatorFilter) + } + + f.handleCriterion(ctx, qb.criterionHandler()) +} + +func (qb *galleryFilterHandler) criterionHandler() criterionHandler { + filter := qb.galleryFilter + return compoundHandler{ + intCriterionHandler(filter.ID, "galleries.id", nil), + stringCriterionHandler(filter.Title, "galleries.title"), + stringCriterionHandler(filter.Code, "galleries.code"), + stringCriterionHandler(filter.Details, "galleries.details"), + stringCriterionHandler(filter.Photographer, "galleries.photographer"), + + criterionHandlerFunc(func(ctx context.Context, f *filterBuilder) { + if filter.Checksum != nil { + galleryRepository.addGalleriesFilesTable(f) + f.addLeftJoin(fingerprintTable, "fingerprints_md5", "galleries_files.file_id = fingerprints_md5.file_id AND fingerprints_md5.type = 'md5'") + } + + stringCriterionHandler(filter.Checksum, "fingerprints_md5.fingerprint")(ctx, f) + }), + + criterionHandlerFunc(func(ctx context.Context, f *filterBuilder) { + if filter.IsZip != nil { + galleryRepository.addGalleriesFilesTable(f) + if *filter.IsZip { + + f.addWhere("galleries_files.file_id IS NOT NULL") + } else { + f.addWhere("galleries_files.file_id IS NULL") + } + } + }), + + qb.pathCriterionHandler(filter.Path), + qb.fileCountCriterionHandler(filter.FileCount), + intCriterionHandler(filter.Rating100, "galleries.rating", nil), + qb.urlsCriterionHandler(filter.URL), + boolCriterionHandler(filter.Organized, "galleries.organized", nil), + qb.missingCriterionHandler(filter.IsMissing), + qb.tagsCriterionHandler(filter.Tags), + qb.tagCountCriterionHandler(filter.TagCount), + qb.performersCriterionHandler(filter.Performers), + qb.performerCountCriterionHandler(filter.PerformerCount), + qb.scenesCriterionHandler(filter.Scenes), + 
qb.hasChaptersCriterionHandler(filter.HasChapters), + studioCriterionHandler(galleryTable, filter.Studios), + qb.performerTagsCriterionHandler(filter.PerformerTags), + qb.averageResolutionCriterionHandler(filter.AverageResolution), + qb.imageCountCriterionHandler(filter.ImageCount), + qb.performerFavoriteCriterionHandler(filter.PerformerFavorite), + qb.performerAgeCriterionHandler(filter.PerformerAge), + &dateCriterionHandler{filter.Date, "galleries.date", nil}, + ×tampCriterionHandler{filter.CreatedAt, "galleries.created_at", nil}, + ×tampCriterionHandler{filter.UpdatedAt, "galleries.updated_at", nil}, + + &relatedFilterHandler{ + relatedIDCol: "scenes_galleries.scene_id", + relatedRepo: sceneRepository.repository, + relatedHandler: &sceneFilterHandler{filter.ScenesFilter}, + joinFn: func(f *filterBuilder) { + galleryRepository.scenes.innerJoin(f, "", "galleries.id") + }, + }, + + &relatedFilterHandler{ + relatedIDCol: "galleries_images.image_id", + relatedRepo: imageRepository.repository, + relatedHandler: &imageFilterHandler{filter.ImagesFilter}, + joinFn: func(f *filterBuilder) { + galleryRepository.images.innerJoin(f, "", "galleries.id") + }, + }, + + &relatedFilterHandler{ + relatedIDCol: "performers_join.performer_id", + relatedRepo: performerRepository.repository, + relatedHandler: &performerFilterHandler{filter.PerformersFilter}, + joinFn: func(f *filterBuilder) { + galleryRepository.performers.innerJoin(f, "performers_join", "galleries.id") + }, + }, + + &relatedFilterHandler{ + relatedIDCol: "galleries.studio_id", + relatedRepo: studioRepository.repository, + relatedHandler: &studioFilterHandler{filter.StudiosFilter}, + }, + + &relatedFilterHandler{ + relatedIDCol: "gallery_tag.tag_id", + relatedRepo: tagRepository.repository, + relatedHandler: &tagFilterHandler{filter.TagsFilter}, + joinFn: func(f *filterBuilder) { + galleryRepository.tags.innerJoin(f, "gallery_tag", "galleries.id") + }, + }, + } +} + +func (qb *galleryFilterHandler) 
urlsCriterionHandler(url *models.StringCriterionInput) criterionHandlerFunc { + h := stringListCriterionHandlerBuilder{ + primaryTable: galleryTable, + primaryFK: galleryIDColumn, + joinTable: galleriesURLsTable, + stringColumn: galleriesURLColumn, + addJoinTable: func(f *filterBuilder) { + galleriesURLsTableMgr.join(f, "", "galleries.id") + }, + } + + return h.handler(url) +} + +func (qb *galleryFilterHandler) getMultiCriterionHandlerBuilder(foreignTable, joinTable, foreignFK string, addJoinsFunc func(f *filterBuilder)) multiCriterionHandlerBuilder { + return multiCriterionHandlerBuilder{ + primaryTable: galleryTable, + foreignTable: foreignTable, + joinTable: joinTable, + primaryFK: galleryIDColumn, + foreignFK: foreignFK, + addJoinsFunc: addJoinsFunc, + } +} + +func (qb *galleryFilterHandler) pathCriterionHandler(c *models.StringCriterionInput) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if c != nil { + galleryRepository.addFoldersTable(f) + f.addLeftJoin(folderTable, "gallery_folder", "galleries.folder_id = gallery_folder.id") + + const pathColumn = "folders.path" + const basenameColumn = "files.basename" + const folderPathColumn = "gallery_folder.path" + + addWildcards := true + not := false + + if modifier := c.Modifier; c.Modifier.IsValid() { + switch modifier { + case models.CriterionModifierIncludes: + clause := getPathSearchClauseMany(pathColumn, basenameColumn, c.Value, addWildcards, not) + clause2 := getStringSearchClause([]string{folderPathColumn}, c.Value, false) + f.whereClauses = append(f.whereClauses, orClauses(clause, clause2)) + case models.CriterionModifierExcludes: + not = true + clause := getPathSearchClauseMany(pathColumn, basenameColumn, c.Value, addWildcards, not) + clause2 := getStringSearchClause([]string{folderPathColumn}, c.Value, true) + f.whereClauses = append(f.whereClauses, orClauses(clause, clause2)) + case models.CriterionModifierEquals: + addWildcards = false + clause := 
getPathSearchClause(pathColumn, basenameColumn, c.Value, addWildcards, not) + clause2 := makeClause(folderPathColumn+" LIKE ?", c.Value) + f.whereClauses = append(f.whereClauses, orClauses(clause, clause2)) + case models.CriterionModifierNotEquals: + addWildcards = false + not = true + clause := getPathSearchClause(pathColumn, basenameColumn, c.Value, addWildcards, not) + clause2 := makeClause(folderPathColumn+" NOT LIKE ?", c.Value) + f.whereClauses = append(f.whereClauses, orClauses(clause, clause2)) + case models.CriterionModifierMatchesRegex: + if _, err := regexp.Compile(c.Value); err != nil { + f.setError(err) + return + } + filepathColumn := fmt.Sprintf("%s || '%s' || %s", pathColumn, string(filepath.Separator), basenameColumn) + clause := makeClause(fmt.Sprintf("%s IS NOT NULL AND %s IS NOT NULL AND %s regexp ?", pathColumn, basenameColumn, filepathColumn), c.Value) + clause2 := makeClause(fmt.Sprintf("%s IS NOT NULL AND %[1]s regexp ?", folderPathColumn), c.Value) + f.whereClauses = append(f.whereClauses, orClauses(clause, clause2)) + case models.CriterionModifierNotMatchesRegex: + if _, err := regexp.Compile(c.Value); err != nil { + f.setError(err) + return + } + filepathColumn := fmt.Sprintf("%s || '%s' || %s", pathColumn, string(filepath.Separator), basenameColumn) + f.addWhere(fmt.Sprintf("%s IS NULL OR %s IS NULL OR %s NOT regexp ?", pathColumn, basenameColumn, filepathColumn), c.Value) + f.addWhere(fmt.Sprintf("%s IS NULL OR %[1]s NOT regexp ?", folderPathColumn), c.Value) + case models.CriterionModifierIsNull: + f.addWhere(fmt.Sprintf("%s IS NULL OR TRIM(%[1]s) = '' OR %s IS NULL OR TRIM(%[2]s) = ''", pathColumn, basenameColumn)) + f.addWhere(fmt.Sprintf("%s IS NULL OR TRIM(%[1]s) = ''", folderPathColumn)) + case models.CriterionModifierNotNull: + clause := makeClause(fmt.Sprintf("%s IS NOT NULL AND TRIM(%[1]s) != '' AND %s IS NOT NULL AND TRIM(%[2]s) != ''", pathColumn, basenameColumn)) + clause2 := makeClause(fmt.Sprintf("%s IS NOT NULL AND 
TRIM(%[1]s) != ''", folderPathColumn)) + f.whereClauses = append(f.whereClauses, orClauses(clause, clause2)) + default: + panic("unsupported string filter modifier") + } + } + } + } +} + +func (qb *galleryFilterHandler) fileCountCriterionHandler(fileCount *models.IntCriterionInput) criterionHandlerFunc { + h := countCriterionHandlerBuilder{ + primaryTable: galleryTable, + joinTable: galleriesFilesTable, + primaryFK: galleryIDColumn, + } + + return h.handler(fileCount) +} + +func (qb *galleryFilterHandler) missingCriterionHandler(isMissing *string) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if isMissing != nil && *isMissing != "" { + switch *isMissing { + case "url": + galleriesURLsTableMgr.join(f, "", "galleries.id") + f.addWhere("gallery_urls.url IS NULL") + case "scenes": + f.addLeftJoin("scenes_galleries", "scenes_join", "scenes_join.gallery_id = galleries.id") + f.addWhere("scenes_join.gallery_id IS NULL") + case "studio": + f.addWhere("galleries.studio_id IS NULL") + case "performers": + galleryRepository.performers.join(f, "performers_join", "galleries.id") + f.addWhere("performers_join.gallery_id IS NULL") + case "date": + f.addWhere("galleries.date IS NULL OR galleries.date IS \"\"") + case "tags": + galleryRepository.tags.join(f, "tags_join", "galleries.id") + f.addWhere("tags_join.gallery_id IS NULL") + default: + f.addWhere("(galleries." + *isMissing + " IS NULL OR TRIM(galleries." 
+ *isMissing + ") = '')") + } + } + } +} + +func (qb *galleryFilterHandler) tagsCriterionHandler(tags *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { + h := joinedHierarchicalMultiCriterionHandlerBuilder{ + primaryTable: galleryTable, + foreignTable: tagTable, + foreignFK: "tag_id", + + relationsTable: "tags_relations", + joinAs: "gallery_tag", + joinTable: galleriesTagsTable, + primaryFK: galleryIDColumn, + } + + return h.handler(tags) +} + +func (qb *galleryFilterHandler) tagCountCriterionHandler(tagCount *models.IntCriterionInput) criterionHandlerFunc { + h := countCriterionHandlerBuilder{ + primaryTable: galleryTable, + joinTable: galleriesTagsTable, + primaryFK: galleryIDColumn, + } + + return h.handler(tagCount) +} + +func (qb *galleryFilterHandler) scenesCriterionHandler(scenes *models.MultiCriterionInput) criterionHandlerFunc { + addJoinsFunc := func(f *filterBuilder) { + galleryRepository.scenes.join(f, "", "galleries.id") + f.addLeftJoin("scenes", "", "scenes_galleries.scene_id = scenes.id") + } + h := qb.getMultiCriterionHandlerBuilder(sceneTable, galleriesScenesTable, "scene_id", addJoinsFunc) + return h.handler(scenes) +} + +func (qb *galleryFilterHandler) performersCriterionHandler(performers *models.MultiCriterionInput) criterionHandlerFunc { + h := joinedMultiCriterionHandlerBuilder{ + primaryTable: galleryTable, + joinTable: performersGalleriesTable, + joinAs: "performers_join", + primaryFK: galleryIDColumn, + foreignFK: performerIDColumn, + + addJoinTable: func(f *filterBuilder) { + galleryRepository.performers.join(f, "performers_join", "galleries.id") + }, + } + + return h.handler(performers) +} + +func (qb *galleryFilterHandler) performerCountCriterionHandler(performerCount *models.IntCriterionInput) criterionHandlerFunc { + h := countCriterionHandlerBuilder{ + primaryTable: galleryTable, + joinTable: performersGalleriesTable, + primaryFK: galleryIDColumn, + } + + return h.handler(performerCount) +} + +func (qb 
*galleryFilterHandler) imageCountCriterionHandler(imageCount *models.IntCriterionInput) criterionHandlerFunc { + h := countCriterionHandlerBuilder{ + primaryTable: galleryTable, + joinTable: galleriesImagesTable, + primaryFK: galleryIDColumn, + } + + return h.handler(imageCount) +} + +func (qb *galleryFilterHandler) hasChaptersCriterionHandler(hasChapters *string) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if hasChapters != nil { + f.addLeftJoin("galleries_chapters", "", "galleries_chapters.gallery_id = galleries.id") + if *hasChapters == "true" { + f.addHaving("count(galleries_chapters.gallery_id) > 0") + } else { + f.addWhere("galleries_chapters.id IS NULL") + } + } + } +} + +func (qb *galleryFilterHandler) performerTagsCriterionHandler(tags *models.HierarchicalMultiCriterionInput) criterionHandler { + return &joinedPerformerTagsHandler{ + criterion: tags, + primaryTable: galleryTable, + joinTable: performersGalleriesTable, + joinPrimaryKey: galleryIDColumn, + } +} + +func (qb *galleryFilterHandler) performerFavoriteCriterionHandler(performerfavorite *bool) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if performerfavorite != nil { + f.addLeftJoin("performers_galleries", "", "galleries.id = performers_galleries.gallery_id") + + if *performerfavorite { + // contains at least one favorite + f.addLeftJoin("performers", "", "performers.id = performers_galleries.performer_id") + f.addWhere("performers.favorite = 1") + } else { + // contains zero favorites + f.addLeftJoin(`(SELECT performers_galleries.gallery_id as id FROM performers_galleries +JOIN performers ON performers.id = performers_galleries.performer_id +GROUP BY performers_galleries.gallery_id HAVING SUM(performers.favorite) = 0)`, "nofaves", "galleries.id = nofaves.id") + f.addWhere("performers_galleries.gallery_id IS NULL OR nofaves.id IS NOT NULL") + } + } + } +} + +func (qb *galleryFilterHandler) 
performerAgeCriterionHandler(performerAge *models.IntCriterionInput) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if performerAge != nil { + f.addInnerJoin("performers_galleries", "", "galleries.id = performers_galleries.gallery_id") + f.addInnerJoin("performers", "", "performers_galleries.performer_id = performers.id") + + f.addWhere("galleries.date != '' AND performers.birthdate != ''") + f.addWhere("galleries.date IS NOT NULL AND performers.birthdate IS NOT NULL") + + ageCalc := "cast(strftime('%Y.%m%d', galleries.date) - strftime('%Y.%m%d', performers.birthdate) as int)" + whereClause, args := getIntWhereClause(ageCalc, performerAge.Modifier, performerAge.Value, performerAge.Value2) + f.addWhere(whereClause, args...) + } + } +} + +func (qb *galleryFilterHandler) averageResolutionCriterionHandler(resolution *models.ResolutionCriterionInput) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if resolution != nil && resolution.Value.IsValid() { + galleryRepository.images.join(f, "images_join", "galleries.id") + f.addLeftJoin("images", "", "images_join.image_id = images.id") + f.addLeftJoin("images_files", "", "images.id = images_files.image_id") + f.addLeftJoin("image_files", "", "images_files.file_id = image_files.file_id") + + min := resolution.Value.GetMinResolution() + max := resolution.Value.GetMaxResolution() + + const widthHeight = "avg(MIN(image_files.width, image_files.height))" + + switch resolution.Modifier { + case models.CriterionModifierEquals: + f.addHaving(fmt.Sprintf("%s BETWEEN %d AND %d", widthHeight, min, max)) + case models.CriterionModifierNotEquals: + f.addHaving(fmt.Sprintf("%s NOT BETWEEN %d AND %d", widthHeight, min, max)) + case models.CriterionModifierLessThan: + f.addHaving(fmt.Sprintf("%s < %d", widthHeight, min)) + case models.CriterionModifierGreaterThan: + f.addHaving(fmt.Sprintf("%s > %d", widthHeight, max)) + } + } + } +} diff --git a/pkg/sqlite/gallery_test.go 
b/pkg/sqlite/gallery_test.go index c57ba08b84a..be1edb687ae 100644 --- a/pkg/sqlite/gallery_test.go +++ b/pkg/sqlite/gallery_test.go @@ -1534,10 +1534,12 @@ func TestGalleryQueryPathOr(t *testing.T) { Value: gallery1Path, Modifier: models.CriterionModifierEquals, }, - Or: &models.GalleryFilterType{ - Path: &models.StringCriterionInput{ - Value: gallery2Path, - Modifier: models.CriterionModifierEquals, + OperatorFilter: models.OperatorFilter[models.GalleryFilterType]{ + Or: &models.GalleryFilterType{ + Path: &models.StringCriterionInput{ + Value: gallery2Path, + Modifier: models.CriterionModifierEquals, + }, }, }, } @@ -1568,10 +1570,12 @@ func TestGalleryQueryPathAndRating(t *testing.T) { Value: galleryPath, Modifier: models.CriterionModifierEquals, }, - And: &models.GalleryFilterType{ - Rating100: &models.IntCriterionInput{ - Value: *galleryRating, - Modifier: models.CriterionModifierEquals, + OperatorFilter: models.OperatorFilter[models.GalleryFilterType]{ + And: &models.GalleryFilterType{ + Rating100: &models.IntCriterionInput{ + Value: *galleryRating, + Modifier: models.CriterionModifierEquals, + }, }, }, } @@ -1609,8 +1613,10 @@ func TestGalleryQueryPathNotRating(t *testing.T) { galleryFilter := models.GalleryFilterType{ Path: &pathCriterion, - Not: &models.GalleryFilterType{ - Rating100: &ratingCriterion, + OperatorFilter: models.OperatorFilter[models.GalleryFilterType]{ + Not: &models.GalleryFilterType{ + Rating100: &ratingCriterion, + }, }, } @@ -1641,8 +1647,10 @@ func TestGalleryIllegalQuery(t *testing.T) { } galleryFilter := &models.GalleryFilterType{ - And: &subFilter, - Or: &subFilter, + OperatorFilter: models.OperatorFilter[models.GalleryFilterType]{ + And: &subFilter, + Or: &subFilter, + }, } withTxn(func(ctx context.Context) error { @@ -1873,7 +1881,7 @@ func TestGalleryQueryIsMissingPerformers(t *testing.T) { assert.True(t, len(galleries) > 0) - // ensure non of the ids equal the one with movies + // ensure non of the ids equal the one with 
galleries for _, gallery := range galleries { assert.NotEqual(t, galleryIDs[galleryIdxWithPerformer], gallery.ID) } @@ -2965,6 +2973,34 @@ func TestGalleryQueryHasChapters(t *testing.T) { }) } +func TestGallerySetAndResetCover(t *testing.T) { + withTxn(func(ctx context.Context) error { + sqb := db.Gallery + + imagePath2 := getFilePath(folderIdxWithImageFiles, getImageBasename(imageIdx2WithGallery)) + + result, err := db.Image.CoverByGalleryID(ctx, galleryIDs[galleryIdxWithTwoImages]) + assert.Nil(t, err) + assert.Nil(t, result) + + err = sqb.SetCover(ctx, galleryIDs[galleryIdxWithTwoImages], imageIDs[imageIdx2WithGallery]) + assert.Nil(t, err) + + result, err = db.Image.CoverByGalleryID(ctx, galleryIDs[galleryIdxWithTwoImages]) + assert.Nil(t, err) + assert.Equal(t, result.Path, imagePath2) + + err = sqb.ResetCover(ctx, galleryIDs[galleryIdxWithTwoImages]) + assert.Nil(t, err) + + result, err = db.Image.CoverByGalleryID(ctx, galleryIDs[galleryIdxWithTwoImages]) + assert.Nil(t, err) + assert.Nil(t, result) + + return nil + }) +} + // TODO Count // TODO All // TODO Query diff --git a/pkg/sqlite/group.go b/pkg/sqlite/group.go new file mode 100644 index 00000000000..603494fe71a --- /dev/null +++ b/pkg/sqlite/group.go @@ -0,0 +1,703 @@ +package sqlite + +import ( + "context" + "database/sql" + "errors" + "fmt" + + "github.com/doug-martin/goqu/v9" + "github.com/doug-martin/goqu/v9/exp" + "github.com/jmoiron/sqlx" + "gopkg.in/guregu/null.v4" + "gopkg.in/guregu/null.v4/zero" + + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/sliceutil" +) + +const ( + groupTable = "groups" + groupIDColumn = "group_id" + + groupFrontImageBlobColumn = "front_image_blob" + groupBackImageBlobColumn = "back_image_blob" + + groupsTagsTable = "groups_tags" + + groupURLsTable = "group_urls" + groupURLColumn = "url" + + groupRelationsTable = "groups_relations" +) + +type groupRow struct { + ID int `db:"id" goqu:"skipinsert"` + Name zero.String `db:"name"` + Aliases 
zero.String `db:"aliases"` + Duration null.Int `db:"duration"` + Date NullDate `db:"date"` + // expressed as 1-100 + Rating null.Int `db:"rating"` + StudioID null.Int `db:"studio_id,omitempty"` + Director zero.String `db:"director"` + Description zero.String `db:"description"` + CreatedAt Timestamp `db:"created_at"` + UpdatedAt Timestamp `db:"updated_at"` + + // not used in resolutions or updates + FrontImageBlob zero.String `db:"front_image_blob"` + BackImageBlob zero.String `db:"back_image_blob"` +} + +func (r *groupRow) fromGroup(o models.Group) { + r.ID = o.ID + r.Name = zero.StringFrom(o.Name) + r.Aliases = zero.StringFrom(o.Aliases) + r.Duration = intFromPtr(o.Duration) + r.Date = NullDateFromDatePtr(o.Date) + r.Rating = intFromPtr(o.Rating) + r.StudioID = intFromPtr(o.StudioID) + r.Director = zero.StringFrom(o.Director) + r.Description = zero.StringFrom(o.Synopsis) + r.CreatedAt = Timestamp{Timestamp: o.CreatedAt} + r.UpdatedAt = Timestamp{Timestamp: o.UpdatedAt} +} + +func (r *groupRow) resolve() *models.Group { + ret := &models.Group{ + ID: r.ID, + Name: r.Name.String, + Aliases: r.Aliases.String, + Duration: nullIntPtr(r.Duration), + Date: r.Date.DatePtr(), + Rating: nullIntPtr(r.Rating), + StudioID: nullIntPtr(r.StudioID), + Director: r.Director.String, + Synopsis: r.Description.String, + CreatedAt: r.CreatedAt.Timestamp, + UpdatedAt: r.UpdatedAt.Timestamp, + } + + return ret +} + +type groupRowRecord struct { + updateRecord +} + +func (r *groupRowRecord) fromPartial(o models.GroupPartial) { + r.setNullString("name", o.Name) + r.setNullString("aliases", o.Aliases) + r.setNullInt("duration", o.Duration) + r.setNullDate("date", o.Date) + r.setNullInt("rating", o.Rating) + r.setNullInt("studio_id", o.StudioID) + r.setNullString("director", o.Director) + r.setNullString("description", o.Synopsis) + r.setTimestamp("created_at", o.CreatedAt) + r.setTimestamp("updated_at", o.UpdatedAt) +} + +type groupRepositoryType struct { + repository + scenes repository + 
tags joinRepository +} + +var ( + groupRepository = groupRepositoryType{ + repository: repository{ + tableName: groupTable, + idColumn: idColumn, + }, + scenes: repository{ + tableName: groupsScenesTable, + idColumn: groupIDColumn, + }, + tags: joinRepository{ + repository: repository{ + tableName: groupsTagsTable, + idColumn: groupIDColumn, + }, + fkColumn: tagIDColumn, + foreignTable: tagTable, + orderBy: "tags.name ASC", + }, + } +) + +type GroupStore struct { + blobJoinQueryBuilder + tagRelationshipStore + groupRelationshipStore + + tableMgr *table +} + +func NewGroupStore(blobStore *BlobStore) *GroupStore { + return &GroupStore{ + blobJoinQueryBuilder: blobJoinQueryBuilder{ + blobStore: blobStore, + joinTable: groupTable, + }, + tagRelationshipStore: tagRelationshipStore{ + idRelationshipStore: idRelationshipStore{ + joinTable: groupsTagsTableMgr, + }, + }, + groupRelationshipStore: groupRelationshipStore{ + table: groupRelationshipTableMgr, + }, + + tableMgr: groupTableMgr, + } +} + +func (qb *GroupStore) table() exp.IdentifierExpression { + return qb.tableMgr.table +} + +func (qb *GroupStore) selectDataset() *goqu.SelectDataset { + return dialect.From(qb.table()).Select(qb.table().All()) +} + +func (qb *GroupStore) Create(ctx context.Context, newObject *models.Group) error { + var r groupRow + r.fromGroup(*newObject) + + id, err := qb.tableMgr.insertID(ctx, r) + if err != nil { + return err + } + + if newObject.URLs.Loaded() { + const startPos = 0 + if err := groupsURLsTableMgr.insertJoins(ctx, id, startPos, newObject.URLs.List()); err != nil { + return err + } + } + + if err := qb.tagRelationshipStore.createRelationships(ctx, id, newObject.TagIDs); err != nil { + return err + } + + if err := qb.groupRelationshipStore.createContainingRelationships(ctx, id, newObject.ContainingGroups); err != nil { + return err + } + + if err := qb.groupRelationshipStore.createSubRelationships(ctx, id, newObject.SubGroups); err != nil { + return err + } + + updated, err := 
qb.find(ctx, id) + if err != nil { + return fmt.Errorf("finding after create: %w", err) + } + + *newObject = *updated + + return nil +} + +func (qb *GroupStore) UpdatePartial(ctx context.Context, id int, partial models.GroupPartial) (*models.Group, error) { + r := groupRowRecord{ + updateRecord{ + Record: make(exp.Record), + }, + } + + r.fromPartial(partial) + + if len(r.Record) > 0 { + if err := qb.tableMgr.updateByID(ctx, id, r.Record); err != nil { + return nil, err + } + } + + if partial.URLs != nil { + if err := groupsURLsTableMgr.modifyJoins(ctx, id, partial.URLs.Values, partial.URLs.Mode); err != nil { + return nil, err + } + } + + if err := qb.tagRelationshipStore.modifyRelationships(ctx, id, partial.TagIDs); err != nil { + return nil, err + } + + if err := qb.groupRelationshipStore.modifyContainingRelationships(ctx, id, partial.ContainingGroups); err != nil { + return nil, err + } + + if err := qb.groupRelationshipStore.modifySubRelationships(ctx, id, partial.SubGroups); err != nil { + return nil, err + } + + return qb.find(ctx, id) +} + +func (qb *GroupStore) Update(ctx context.Context, updatedObject *models.Group) error { + var r groupRow + r.fromGroup(*updatedObject) + + if err := qb.tableMgr.updateByID(ctx, updatedObject.ID, r); err != nil { + return err + } + + if updatedObject.URLs.Loaded() { + if err := groupsURLsTableMgr.replaceJoins(ctx, updatedObject.ID, updatedObject.URLs.List()); err != nil { + return err + } + } + + if err := qb.tagRelationshipStore.replaceRelationships(ctx, updatedObject.ID, updatedObject.TagIDs); err != nil { + return err + } + + if err := qb.groupRelationshipStore.replaceContainingRelationships(ctx, updatedObject.ID, updatedObject.ContainingGroups); err != nil { + return err + } + + if err := qb.groupRelationshipStore.replaceSubRelationships(ctx, updatedObject.ID, updatedObject.SubGroups); err != nil { + return err + } + + return nil +} + +func (qb *GroupStore) Destroy(ctx context.Context, id int) error { + // must handle 
image checksums manually + if err := qb.destroyImages(ctx, id); err != nil { + return err + } + + return groupRepository.destroyExisting(ctx, []int{id}) +} + +// returns nil, nil if not found +func (qb *GroupStore) Find(ctx context.Context, id int) (*models.Group, error) { + ret, err := qb.find(ctx, id) + if errors.Is(err, sql.ErrNoRows) { + return nil, nil + } + return ret, err +} + +func (qb *GroupStore) FindMany(ctx context.Context, ids []int) ([]*models.Group, error) { + ret := make([]*models.Group, len(ids)) + + table := qb.table() + if err := batchExec(ids, defaultBatchSize, func(batch []int) error { + q := qb.selectDataset().Prepared(true).Where(table.Col(idColumn).In(batch)) + unsorted, err := qb.getMany(ctx, q) + if err != nil { + return err + } + + for _, s := range unsorted { + i := sliceutil.Index(ids, s.ID) + ret[i] = s + } + + return nil + }); err != nil { + return nil, err + } + + for i := range ret { + if ret[i] == nil { + return nil, fmt.Errorf("group with id %d not found", ids[i]) + } + } + + return ret, nil +} + +// returns nil, sql.ErrNoRows if not found +func (qb *GroupStore) find(ctx context.Context, id int) (*models.Group, error) { + q := qb.selectDataset().Where(qb.tableMgr.byID(id)) + + ret, err := qb.get(ctx, q) + if err != nil { + return nil, err + } + + return ret, nil +} + +// returns nil, sql.ErrNoRows if not found +func (qb *GroupStore) get(ctx context.Context, q *goqu.SelectDataset) (*models.Group, error) { + ret, err := qb.getMany(ctx, q) + if err != nil { + return nil, err + } + + if len(ret) == 0 { + return nil, sql.ErrNoRows + } + + return ret[0], nil +} + +func (qb *GroupStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]*models.Group, error) { + const single = false + var ret []*models.Group + if err := queryFunc(ctx, q, single, func(r *sqlx.Rows) error { + var f groupRow + if err := r.StructScan(&f); err != nil { + return err + } + + s := f.resolve() + + ret = append(ret, s) + return nil + }); err != nil { + return 
nil, err + } + + return ret, nil +} + +func (qb *GroupStore) FindByName(ctx context.Context, name string, nocase bool) (*models.Group, error) { + // query := "SELECT * FROM groups WHERE name = ?" + // if nocase { + // query += " COLLATE NOCASE" + // } + // query += " LIMIT 1" + where := "name = ?" + if nocase { + where += " COLLATE NOCASE" + } + sq := qb.selectDataset().Prepared(true).Where(goqu.L(where, name)).Limit(1) + ret, err := qb.get(ctx, sq) + + if err != nil && !errors.Is(err, sql.ErrNoRows) { + return nil, err + } + + return ret, nil +} + +func (qb *GroupStore) FindByNames(ctx context.Context, names []string, nocase bool) ([]*models.Group, error) { + // query := "SELECT * FROM groups WHERE name" + // if nocase { + // query += " COLLATE NOCASE" + // } + // query += " IN " + getInBinding(len(names)) + where := "name" + if nocase { + where += " COLLATE NOCASE" + } + where += " IN " + getInBinding(len(names)) + var args []interface{} + for _, name := range names { + args = append(args, name) + } + sq := qb.selectDataset().Prepared(true).Where(goqu.L(where, args...)) + ret, err := qb.getMany(ctx, sq) + + if err != nil { + return nil, err + } + + return ret, nil +} + +func (qb *GroupStore) Count(ctx context.Context) (int, error) { + q := dialect.Select(goqu.COUNT("*")).From(qb.table()) + return count(ctx, q) +} + +func (qb *GroupStore) All(ctx context.Context) ([]*models.Group, error) { + table := qb.table() + + return qb.getMany(ctx, qb.selectDataset().Order( + table.Col("name").Asc(), + table.Col(idColumn).Asc(), + )) +} + +func (qb *GroupStore) makeQuery(ctx context.Context, groupFilter *models.GroupFilterType, findFilter *models.FindFilterType) (*queryBuilder, error) { + if findFilter == nil { + findFilter = &models.FindFilterType{} + } + if groupFilter == nil { + groupFilter = &models.GroupFilterType{} + } + + query := groupRepository.newQuery() + distinctIDs(&query, groupTable) + + if q := findFilter.Q; q != nil && *q != "" { + searchColumns := 
[]string{"groups.name", "groups.aliases"} + query.parseQueryString(searchColumns, *q) + } + + filter := filterBuilderFromHandler(ctx, &groupFilterHandler{ + groupFilter: groupFilter, + }) + + if err := query.addFilter(filter); err != nil { + return nil, err + } + + if err := qb.setGroupSort(&query, findFilter); err != nil { + return nil, err + } + + query.sortAndPagination += getPagination(findFilter) + + return &query, nil +} + +func (qb *GroupStore) Query(ctx context.Context, groupFilter *models.GroupFilterType, findFilter *models.FindFilterType) ([]*models.Group, int, error) { + query, err := qb.makeQuery(ctx, groupFilter, findFilter) + if err != nil { + return nil, 0, err + } + + idsResult, countResult, err := query.executeFind(ctx) + if err != nil { + return nil, 0, err + } + + groups, err := qb.FindMany(ctx, idsResult) + if err != nil { + return nil, 0, err + } + + return groups, countResult, nil +} + +func (qb *GroupStore) QueryCount(ctx context.Context, groupFilter *models.GroupFilterType, findFilter *models.FindFilterType) (int, error) { + query, err := qb.makeQuery(ctx, groupFilter, findFilter) + if err != nil { + return 0, err + } + + return query.executeCount(ctx) +} + +var groupSortOptions = sortOptions{ + "created_at", + "date", + "duration", + "id", + "name", + "random", + "rating", + "scenes_count", + "sub_group_order", + "tag_count", + "updated_at", +} + +func (qb *GroupStore) setGroupSort(query *queryBuilder, findFilter *models.FindFilterType) error { + var sort string + var direction string + if findFilter == nil { + sort = "name" + direction = "ASC" + } else { + sort = findFilter.GetSort("name") + direction = findFilter.GetDirection() + } + + // CVE-2024-32231 - ensure sort is in the list of allowed sorts + if err := groupSortOptions.validateSort(sort); err != nil { + return err + } + + switch sort { + case "sub_group_order": + // sub_group_order is a special sort that sorts by the order_index of the subgroups + if 
query.hasJoin("groups_parents") { + query.sortAndPagination += getSort("order_index", direction, "groups_parents") + } else { + // this will give unexpected results if the query is not filtered by a parent group and + // the group has multiple parents and order indexes + query.join(groupRelationsTable, "", "groups.id = groups_relations.sub_id") + query.sortAndPagination += getSort("order_index", direction, groupRelationsTable) + } + case "tag_count": + query.sortAndPagination += getCountSort(groupTable, groupsTagsTable, groupIDColumn, direction) + case "scenes_count": // generic getSort won't work for this + query.sortAndPagination += getCountSort(groupTable, groupsScenesTable, groupIDColumn, direction) + default: + query.sortAndPagination += getSort(sort, direction, "groups") + } + + // Whatever the sorting, always use name/id as a final sort + query.sortAndPagination += ", COALESCE(groups.name, groups.id) COLLATE NATURAL_CI ASC" + return nil +} + +func (qb *GroupStore) queryGroups(ctx context.Context, query string, args []interface{}) ([]*models.Group, error) { + const single = false + var ret []*models.Group + if err := groupRepository.queryFunc(ctx, query, args, single, func(r *sqlx.Rows) error { + var f groupRow + if err := r.StructScan(&f); err != nil { + return err + } + + s := f.resolve() + + ret = append(ret, s) + return nil + }); err != nil { + return nil, err + } + + return ret, nil +} + +func (qb *GroupStore) UpdateFrontImage(ctx context.Context, groupID int, frontImage []byte) error { + return qb.UpdateImage(ctx, groupID, groupFrontImageBlobColumn, frontImage) +} + +func (qb *GroupStore) UpdateBackImage(ctx context.Context, groupID int, backImage []byte) error { + return qb.UpdateImage(ctx, groupID, groupBackImageBlobColumn, backImage) +} + +func (qb *GroupStore) destroyImages(ctx context.Context, groupID int) error { + if err := qb.DestroyImage(ctx, groupID, groupFrontImageBlobColumn); err != nil { + return err + } + if err := qb.DestroyImage(ctx, 
groupID, groupBackImageBlobColumn); err != nil { + return err + } + + return nil +} + +func (qb *GroupStore) GetFrontImage(ctx context.Context, groupID int) ([]byte, error) { + return qb.GetImage(ctx, groupID, groupFrontImageBlobColumn) +} + +func (qb *GroupStore) HasFrontImage(ctx context.Context, groupID int) (bool, error) { + return qb.HasImage(ctx, groupID, groupFrontImageBlobColumn) +} + +func (qb *GroupStore) GetBackImage(ctx context.Context, groupID int) ([]byte, error) { + return qb.GetImage(ctx, groupID, groupBackImageBlobColumn) +} + +func (qb *GroupStore) HasBackImage(ctx context.Context, groupID int) (bool, error) { + return qb.HasImage(ctx, groupID, groupBackImageBlobColumn) +} + +func (qb *GroupStore) FindByPerformerID(ctx context.Context, performerID int) ([]*models.Group, error) { + query := `SELECT DISTINCT groups.* +FROM groups +INNER JOIN groups_scenes ON groups.id = groups_scenes.group_id +INNER JOIN performers_scenes ON performers_scenes.scene_id = groups_scenes.scene_id +WHERE performers_scenes.performer_id = ? +` + args := []interface{}{performerID} + return qb.queryGroups(ctx, query, args) +} + +func (qb *GroupStore) CountByPerformerID(ctx context.Context, performerID int) (int, error) { + query := `SELECT COUNT(DISTINCT groups_scenes.group_id) AS count +FROM groups_scenes +INNER JOIN performers_scenes ON performers_scenes.scene_id = groups_scenes.scene_id +WHERE performers_scenes.performer_id = ? +` + args := []interface{}{performerID} + return groupRepository.runCountQuery(ctx, query, args) +} + +func (qb *GroupStore) FindByStudioID(ctx context.Context, studioID int) ([]*models.Group, error) { + query := `SELECT groups.* +FROM groups +WHERE groups.studio_id = ? +` + args := []interface{}{studioID} + return qb.queryGroups(ctx, query, args) +} + +func (qb *GroupStore) CountByStudioID(ctx context.Context, studioID int) (int, error) { + query := `SELECT COUNT(1) AS count +FROM groups +WHERE groups.studio_id = ? 
+` + args := []interface{}{studioID} + return groupRepository.runCountQuery(ctx, query, args) +} + +func (qb *GroupStore) GetURLs(ctx context.Context, groupID int) ([]string, error) { + return groupsURLsTableMgr.get(ctx, groupID) +} + +// FindSubGroupIDs returns a list of group IDs where a group in the ids list is a sub-group of the parent group +func (qb *GroupStore) FindSubGroupIDs(ctx context.Context, containingID int, ids []int) ([]int, error) { + /* + SELECT gr.sub_id FROM groups_relations gr + WHERE gr.containing_id = :parentID AND gr.sub_id IN (:ids); + */ + table := groupRelationshipTableMgr.table + q := dialect.From(table).Prepared(true). + Select(table.Col("sub_id")).Where( + table.Col("containing_id").Eq(containingID), + table.Col("sub_id").In(ids), + ) + + const single = false + var ret []int + if err := queryFunc(ctx, q, single, func(r *sqlx.Rows) error { + var id int + if err := r.Scan(&id); err != nil { + return err + } + + ret = append(ret, id) + return nil + }); err != nil { + return nil, err + } + + return ret, nil +} + +// FindInAncestors returns a list of group IDs where a group in the ids list is an ancestor of the given ancestor group IDs +func (qb *GroupStore) FindInAncestors(ctx context.Context, ascestorIDs []int, ids []int) ([]int, error) { + /* + WITH RECURSIVE ancestors AS ( + SELECT g.id AS parent_id FROM groups g WHERE g.id IN (:ascestorIDs) + UNION + SELECT gr.containing_id FROM groups_relations gr INNER JOIN ancestors a ON a.parent_id = gr.sub_id + ) + SELECT p.parent_id FROM ancestors p WHERE p.parent_id IN (:ids); + */ + table := qb.table() + const ascestors = "ancestors" + const parentID = "parent_id" + q := dialect.From(ascestors).Prepared(true). + WithRecursive(ascestors, + dialect.From(qb.table()).Select(table.Col(idColumn).As(parentID)). + Where(table.Col(idColumn).In(ascestorIDs)). 
+ Union( + dialect.From(groupRelationsJoinTable).InnerJoin( + goqu.I(ascestors), goqu.On(goqu.I("parent_id").Eq(goqu.I("sub_id"))), + ).Select("containing_id"), + ), + ).Select(parentID).Where(goqu.I(parentID).In(ids)) + + const single = false + var ret []int + if err := queryFunc(ctx, q, single, func(r *sqlx.Rows) error { + var id int + if err := r.Scan(&id); err != nil { + return err + } + + ret = append(ret, id) + return nil + }); err != nil { + return nil, err + } + + return ret, nil +} diff --git a/pkg/sqlite/group_filter.go b/pkg/sqlite/group_filter.go new file mode 100644 index 00000000000..dcb7bcdfc94 --- /dev/null +++ b/pkg/sqlite/group_filter.go @@ -0,0 +1,203 @@ +package sqlite + +import ( + "context" + "fmt" + + "github.com/stashapp/stash/pkg/models" +) + +type groupFilterHandler struct { + groupFilter *models.GroupFilterType +} + +func (qb *groupFilterHandler) validate() error { + groupFilter := qb.groupFilter + if groupFilter == nil { + return nil + } + + if err := validateFilterCombination(groupFilter.OperatorFilter); err != nil { + return err + } + + if subFilter := groupFilter.SubFilter(); subFilter != nil { + sqb := &groupFilterHandler{groupFilter: subFilter} + if err := sqb.validate(); err != nil { + return err + } + } + + return nil +} + +func (qb *groupFilterHandler) handle(ctx context.Context, f *filterBuilder) { + groupFilter := qb.groupFilter + if groupFilter == nil { + return + } + + if err := qb.validate(); err != nil { + f.setError(err) + return + } + + sf := groupFilter.SubFilter() + if sf != nil { + sub := &groupFilterHandler{sf} + handleSubFilter(ctx, sub, f, groupFilter.OperatorFilter) + } + + f.handleCriterion(ctx, qb.criterionHandler()) +} + +var groupHierarchyHandler = hierarchicalRelationshipHandler{ + primaryTable: groupTable, + relationTable: groupRelationsTable, + aliasPrefix: groupTable, + parentIDCol: "containing_id", + childIDCol: "sub_id", +} + +func (qb *groupFilterHandler) criterionHandler() criterionHandler { + 
groupFilter := qb.groupFilter + return compoundHandler{ + stringCriterionHandler(groupFilter.Name, "groups.name"), + stringCriterionHandler(groupFilter.Director, "groups.director"), + stringCriterionHandler(groupFilter.Synopsis, "groups.description"), + intCriterionHandler(groupFilter.Rating100, "groups.rating", nil), + floatIntCriterionHandler(groupFilter.Duration, "groups.duration", nil), + qb.missingCriterionHandler(groupFilter.IsMissing), + qb.urlsCriterionHandler(groupFilter.URL), + studioCriterionHandler(groupTable, groupFilter.Studios), + qb.performersCriterionHandler(groupFilter.Performers), + qb.tagsCriterionHandler(groupFilter.Tags), + qb.tagCountCriterionHandler(groupFilter.TagCount), + &dateCriterionHandler{groupFilter.Date, "groups.date", nil}, + groupHierarchyHandler.ParentsCriterionHandler(groupFilter.ContainingGroups), + groupHierarchyHandler.ChildrenCriterionHandler(groupFilter.SubGroups), + groupHierarchyHandler.ParentCountCriterionHandler(groupFilter.ContainingGroupCount), + groupHierarchyHandler.ChildCountCriterionHandler(groupFilter.SubGroupCount), + ×tampCriterionHandler{groupFilter.CreatedAt, "groups.created_at", nil}, + ×tampCriterionHandler{groupFilter.UpdatedAt, "groups.updated_at", nil}, + + &relatedFilterHandler{ + relatedIDCol: "groups_scenes.scene_id", + relatedRepo: sceneRepository.repository, + relatedHandler: &sceneFilterHandler{groupFilter.ScenesFilter}, + joinFn: func(f *filterBuilder) { + groupRepository.scenes.innerJoin(f, "", "groups.id") + }, + }, + + &relatedFilterHandler{ + relatedIDCol: "groups.studio_id", + relatedRepo: studioRepository.repository, + relatedHandler: &studioFilterHandler{groupFilter.StudiosFilter}, + }, + } +} + +func (qb *groupFilterHandler) missingCriterionHandler(isMissing *string) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if isMissing != nil && *isMissing != "" { + switch *isMissing { + case "front_image": + f.addWhere("groups.front_image_blob IS NULL") + case 
"back_image": + f.addWhere("groups.back_image_blob IS NULL") + case "scenes": + f.addLeftJoin("groups_scenes", "", "groups_scenes.group_id = groups.id") + f.addWhere("groups_scenes.scene_id IS NULL") + default: + f.addWhere("(groups." + *isMissing + " IS NULL OR TRIM(groups." + *isMissing + ") = '')") + } + } + } +} + +func (qb *groupFilterHandler) urlsCriterionHandler(url *models.StringCriterionInput) criterionHandlerFunc { + h := stringListCriterionHandlerBuilder{ + primaryTable: groupTable, + primaryFK: groupIDColumn, + joinTable: groupURLsTable, + stringColumn: groupURLColumn, + addJoinTable: func(f *filterBuilder) { + groupsURLsTableMgr.join(f, "", "groups.id") + }, + } + + return h.handler(url) +} + +func (qb *groupFilterHandler) performersCriterionHandler(performers *models.MultiCriterionInput) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if performers != nil { + if performers.Modifier == models.CriterionModifierIsNull || performers.Modifier == models.CriterionModifierNotNull { + var notClause string + if performers.Modifier == models.CriterionModifierNotNull { + notClause = "NOT" + } + + f.addLeftJoin("groups_scenes", "", "groups.id = groups_scenes.group_id") + f.addLeftJoin("performers_scenes", "", "groups_scenes.scene_id = performers_scenes.scene_id") + + f.addWhere(fmt.Sprintf("performers_scenes.performer_id IS %s NULL", notClause)) + return + } + + if len(performers.Value) == 0 { + return + } + + var args []interface{} + for _, arg := range performers.Value { + args = append(args, arg) + } + + // Hack, can't apply args to join, nor inner join on a left join, so use CTE instead + f.addWith(`groups_performers AS ( + SELECT groups_scenes.group_id, performers_scenes.performer_id + FROM groups_scenes + INNER JOIN performers_scenes ON groups_scenes.scene_id = performers_scenes.scene_id + WHERE performers_scenes.performer_id IN`+getInBinding(len(performers.Value))+` + )`, args...) 
+ f.addLeftJoin("groups_performers", "", "groups.id = groups_performers.group_id") + + switch performers.Modifier { + case models.CriterionModifierIncludes: + f.addWhere("groups_performers.performer_id IS NOT NULL") + case models.CriterionModifierIncludesAll: + f.addWhere("groups_performers.performer_id IS NOT NULL") + f.addHaving("COUNT(DISTINCT groups_performers.performer_id) = ?", len(performers.Value)) + case models.CriterionModifierExcludes: + f.addWhere("groups_performers.performer_id IS NULL") + } + } + } +} + +func (qb *groupFilterHandler) tagsCriterionHandler(tags *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { + h := joinedHierarchicalMultiCriterionHandlerBuilder{ + primaryTable: groupTable, + foreignTable: tagTable, + foreignFK: "tag_id", + + relationsTable: "tags_relations", + joinAs: "group_tag", + joinTable: groupsTagsTable, + primaryFK: groupIDColumn, + } + + return h.handler(tags) +} + +func (qb *groupFilterHandler) tagCountCriterionHandler(count *models.IntCriterionInput) criterionHandlerFunc { + h := countCriterionHandlerBuilder{ + primaryTable: groupTable, + joinTable: groupsTagsTable, + primaryFK: groupIDColumn, + } + + return h.handler(count) +} diff --git a/pkg/sqlite/group_relationships.go b/pkg/sqlite/group_relationships.go new file mode 100644 index 00000000000..fe94394f905 --- /dev/null +++ b/pkg/sqlite/group_relationships.go @@ -0,0 +1,457 @@ +package sqlite + +import ( + "context" + "fmt" + + "github.com/doug-martin/goqu/v9" + "github.com/doug-martin/goqu/v9/exp" + "github.com/jmoiron/sqlx" + "github.com/stashapp/stash/pkg/models" + "gopkg.in/guregu/null.v4" + "gopkg.in/guregu/null.v4/zero" +) + +type groupRelationshipRow struct { + ContainingID int `db:"containing_id"` + SubID int `db:"sub_id"` + OrderIndex int `db:"order_index"` + Description zero.String `db:"description"` +} + +func (r groupRelationshipRow) resolve(useContainingID bool) models.GroupIDDescription { + id := r.ContainingID + if !useContainingID { + id = 
r.SubID + } + + return models.GroupIDDescription{ + GroupID: id, + Description: r.Description.String, + } +} + +type groupRelationshipStore struct { + table *table +} + +func (s *groupRelationshipStore) GetContainingGroupDescriptions(ctx context.Context, id int) ([]models.GroupIDDescription, error) { + const idIsContaining = false + return s.getGroupRelationships(ctx, id, idIsContaining) +} + +func (s *groupRelationshipStore) GetSubGroupDescriptions(ctx context.Context, id int) ([]models.GroupIDDescription, error) { + const idIsContaining = true + return s.getGroupRelationships(ctx, id, idIsContaining) +} + +func (s *groupRelationshipStore) getGroupRelationships(ctx context.Context, id int, idIsContaining bool) ([]models.GroupIDDescription, error) { + col := "containing_id" + if !idIsContaining { + col = "sub_id" + } + + table := s.table.table + q := dialect.Select(table.All()). + From(table). + Where(table.Col(col).Eq(id)). + Order(table.Col("order_index").Asc()) + + const single = false + var ret []models.GroupIDDescription + if err := queryFunc(ctx, q, single, func(rows *sqlx.Rows) error { + var row groupRelationshipRow + if err := rows.StructScan(&row); err != nil { + return err + } + + ret = append(ret, row.resolve(!idIsContaining)) + + return nil + }); err != nil { + return nil, fmt.Errorf("getting group relationships from %s: %w", table.GetTable(), err) + } + + return ret, nil +} + +// getMaxOrderIndex gets the maximum order index for the containing group with the given id +func (s *groupRelationshipStore) getMaxOrderIndex(ctx context.Context, containingID int) (int, error) { + idColumn := s.table.table.Col("containing_id") + + q := dialect.Select(goqu.MAX("order_index")). + From(s.table.table). 
+ Where(idColumn.Eq(containingID)) + + var maxOrderIndex zero.Int + if err := querySimple(ctx, q, &maxOrderIndex); err != nil { + return 0, fmt.Errorf("getting max order index: %w", err) + } + + return int(maxOrderIndex.Int64), nil +} + +// createRelationships creates relationships between a group and other groups. +// If idIsContaining is true, the provided id is the containing group. +func (s *groupRelationshipStore) createRelationships(ctx context.Context, id int, d models.RelatedGroupDescriptions, idIsContaining bool) error { + if d.Loaded() { + for i, v := range d.List() { + orderIndex := i + 1 + + r := groupRelationshipRow{ + ContainingID: id, + SubID: v.GroupID, + OrderIndex: orderIndex, + Description: zero.StringFrom(v.Description), + } + + if !idIsContaining { + // get the max order index of the containing groups sub groups + containingID := v.GroupID + maxOrderIndex, err := s.getMaxOrderIndex(ctx, containingID) + if err != nil { + return err + } + + r.ContainingID = v.GroupID + r.SubID = id + r.OrderIndex = maxOrderIndex + 1 + } + + _, err := s.table.insert(ctx, r) + if err != nil { + return fmt.Errorf("inserting into %s: %w", s.table.table.GetTable(), err) + } + } + + return nil + } + + return nil +} + +// createRelationships creates relationships between a group and other groups. +// If idIsContaining is true, the provided id is the containing group. +func (s *groupRelationshipStore) createContainingRelationships(ctx context.Context, id int, d models.RelatedGroupDescriptions) error { + const idIsContaining = false + return s.createRelationships(ctx, id, d, idIsContaining) +} + +// createRelationships creates relationships between a group and other groups. +// If idIsContaining is true, the provided id is the containing group. 
+func (s *groupRelationshipStore) createSubRelationships(ctx context.Context, id int, d models.RelatedGroupDescriptions) error { + const idIsContaining = true + return s.createRelationships(ctx, id, d, idIsContaining) +} + +func (s *groupRelationshipStore) replaceRelationships(ctx context.Context, id int, d models.RelatedGroupDescriptions, idIsContaining bool) error { + // always destroy the existing relationships even if the new list is empty + if err := s.destroyAllJoins(ctx, id, idIsContaining); err != nil { + return err + } + + return s.createRelationships(ctx, id, d, idIsContaining) +} + +func (s *groupRelationshipStore) replaceContainingRelationships(ctx context.Context, id int, d models.RelatedGroupDescriptions) error { + const idIsContaining = false + return s.replaceRelationships(ctx, id, d, idIsContaining) +} + +func (s *groupRelationshipStore) replaceSubRelationships(ctx context.Context, id int, d models.RelatedGroupDescriptions) error { + const idIsContaining = true + return s.replaceRelationships(ctx, id, d, idIsContaining) +} + +func (s *groupRelationshipStore) modifyRelationships(ctx context.Context, id int, v *models.UpdateGroupDescriptions, idIsContaining bool) error { + if v == nil { + return nil + } + + switch v.Mode { + case models.RelationshipUpdateModeSet: + return s.replaceJoins(ctx, id, *v, idIsContaining) + case models.RelationshipUpdateModeAdd: + return s.addJoins(ctx, id, v.Groups, idIsContaining) + case models.RelationshipUpdateModeRemove: + toRemove := make([]int, len(v.Groups)) + for i, vv := range v.Groups { + toRemove[i] = vv.GroupID + } + return s.destroyJoins(ctx, id, toRemove, idIsContaining) + } + + return nil +} + +func (s *groupRelationshipStore) modifyContainingRelationships(ctx context.Context, id int, v *models.UpdateGroupDescriptions) error { + const idIsContaining = false + return s.modifyRelationships(ctx, id, v, idIsContaining) +} + +func (s *groupRelationshipStore) modifySubRelationships(ctx context.Context, id int, v 
*models.UpdateGroupDescriptions) error { + const idIsContaining = true + return s.modifyRelationships(ctx, id, v, idIsContaining) +} + +func (s *groupRelationshipStore) addJoins(ctx context.Context, id int, groups []models.GroupIDDescription, idIsContaining bool) error { + // if we're adding to a containing group, get the max order index first + var maxOrderIndex int + if idIsContaining { + var err error + maxOrderIndex, err = s.getMaxOrderIndex(ctx, id) + if err != nil { + return err + } + } + + for i, vv := range groups { + r := groupRelationshipRow{ + Description: zero.StringFrom(vv.Description), + } + + if idIsContaining { + r.ContainingID = id + r.SubID = vv.GroupID + r.OrderIndex = maxOrderIndex + (i + 1) + } else { + // get the max order index of the containing groups sub groups + containingMaxOrderIndex, err := s.getMaxOrderIndex(ctx, vv.GroupID) + if err != nil { + return err + } + + r.ContainingID = vv.GroupID + r.SubID = id + r.OrderIndex = containingMaxOrderIndex + 1 + } + + _, err := s.table.insert(ctx, r) + if err != nil { + return fmt.Errorf("inserting into %s: %w", s.table.table.GetTable(), err) + } + } + + return nil +} + +func (s *groupRelationshipStore) destroyAllJoins(ctx context.Context, id int, idIsContaining bool) error { + table := s.table.table + idColumn := table.Col("containing_id") + if !idIsContaining { + idColumn = table.Col("sub_id") + } + + q := dialect.Delete(table).Where(idColumn.Eq(id)) + + if _, err := exec(ctx, q); err != nil { + return fmt.Errorf("destroying %s: %w", table.GetTable(), err) + } + + return nil +} + +func (s *groupRelationshipStore) replaceJoins(ctx context.Context, id int, v models.UpdateGroupDescriptions, idIsContaining bool) error { + if err := s.destroyAllJoins(ctx, id, idIsContaining); err != nil { + return err + } + + // convert to RelatedGroupDescriptions + rgd := models.NewRelatedGroupDescriptions(v.Groups) + return s.createRelationships(ctx, id, rgd, idIsContaining) +} + +func (s *groupRelationshipStore) 
destroyJoins(ctx context.Context, id int, toRemove []int, idIsContaining bool) error { + table := s.table.table + idColumn := table.Col("containing_id") + fkColumn := table.Col("sub_id") + if !idIsContaining { + idColumn = table.Col("sub_id") + fkColumn = table.Col("containing_id") + } + + q := dialect.Delete(table).Where(idColumn.Eq(id), fkColumn.In(toRemove)) + + if _, err := exec(ctx, q); err != nil { + return fmt.Errorf("destroying %s: %w", table.GetTable(), err) + } + + return nil +} + +func (s *groupRelationshipStore) getOrderIndexOfSubGroup(ctx context.Context, containingGroupID int, subGroupID int) (int, error) { + table := s.table.table + q := dialect.Select("order_index"). + From(table). + Where( + table.Col("containing_id").Eq(containingGroupID), + table.Col("sub_id").Eq(subGroupID), + ) + + var orderIndex null.Int + if err := querySimple(ctx, q, &orderIndex); err != nil { + return 0, fmt.Errorf("getting order index: %w", err) + } + + if !orderIndex.Valid { + return 0, fmt.Errorf("sub-group %d not found in containing group %d", subGroupID, containingGroupID) + } + + return int(orderIndex.Int64), nil +} + +func (s *groupRelationshipStore) getGroupIDAtOrderIndex(ctx context.Context, containingGroupID int, orderIndex int) (*int, error) { + table := s.table.table + q := dialect.Select(table.Col("sub_id")).From(table).Where( + table.Col("containing_id").Eq(containingGroupID), + table.Col("order_index").Eq(orderIndex), + ) + + var ret null.Int + if err := querySimple(ctx, q, &ret); err != nil { + return nil, fmt.Errorf("getting sub id for order index: %w", err) + } + + if !ret.Valid { + return nil, nil + } + + intRet := int(ret.Int64) + return &intRet, nil +} + +func (s *groupRelationshipStore) getOrderIndexAfterOrderIndex(ctx context.Context, containingGroupID int, orderIndex int) (int, error) { + table := s.table.table + q := dialect.Select(goqu.MIN("order_index")).From(table).Where( + table.Col("containing_id").Eq(containingGroupID), + 
table.Col("order_index").Gt(orderIndex), + ) + + var ret null.Int + if err := querySimple(ctx, q, &ret); err != nil { + return 0, fmt.Errorf("getting order index: %w", err) + } + + if !ret.Valid { + return orderIndex + 1, nil + } + + return int(ret.Int64), nil +} + +// incrementOrderIndexes increments the order_index value of all sub-groups in the containing group at or after the given index +func (s *groupRelationshipStore) incrementOrderIndexes(ctx context.Context, groupID int, indexBefore int) error { + table := s.table.table + + // WORKAROUND - sqlite won't allow incrementing the value directly since it causes a + // unique constraint violation. + // Instead, we first set the order index to a negative value temporarily + // see https://stackoverflow.com/a/7703239/695786 + q := dialect.Update(table).Set(exp.Record{ + "order_index": goqu.L("-order_index"), + }).Where( + table.Col("containing_id").Eq(groupID), + table.Col("order_index").Gte(indexBefore), + ) + + if _, err := exec(ctx, q); err != nil { + return fmt.Errorf("updating %s: %w", table.GetTable(), err) + } + + q = dialect.Update(table).Set(exp.Record{ + "order_index": goqu.L("1-order_index"), + }).Where( + table.Col("containing_id").Eq(groupID), + table.Col("order_index").Lt(0), + ) + + if _, err := exec(ctx, q); err != nil { + return fmt.Errorf("updating %s: %w", table.GetTable(), err) + } + + return nil +} + +func (s *groupRelationshipStore) reorderSubGroup(ctx context.Context, groupID int, subGroupID int, insertPointID int, insertAfter bool) error { + insertPointIndex, err := s.getOrderIndexOfSubGroup(ctx, groupID, insertPointID) + if err != nil { + return err + } + + // if we're setting before + if insertAfter { + insertPointIndex, err = s.getOrderIndexAfterOrderIndex(ctx, groupID, insertPointIndex) + if err != nil { + return err + } + } + + // increment the order index of all sub-groups after and including the insertion point + if err := s.incrementOrderIndexes(ctx, groupID, int(insertPointIndex)); 
err != nil { + return err + } + + // set the order index of the sub-group to the insertion point + table := s.table.table + q := dialect.Update(table).Set(exp.Record{ + "order_index": insertPointIndex, + }).Where( + table.Col("containing_id").Eq(groupID), + table.Col("sub_id").Eq(subGroupID), + ) + + if _, err := exec(ctx, q); err != nil { + return fmt.Errorf("updating %s: %w", table.GetTable(), err) + } + + return nil +} + +func (s *groupRelationshipStore) AddSubGroups(ctx context.Context, groupID int, subGroups []models.GroupIDDescription, insertIndex *int) error { + const idIsContaining = true + + if err := s.addJoins(ctx, groupID, subGroups, idIsContaining); err != nil { + return err + } + + ids := make([]int, len(subGroups)) + for i, v := range subGroups { + ids[i] = v.GroupID + } + + if insertIndex != nil { + // get the id of the sub-group at the insert index + insertPointID, err := s.getGroupIDAtOrderIndex(ctx, groupID, *insertIndex) + if err != nil { + return err + } + + if insertPointID == nil { + // if the insert index is out of bounds, just assume adding to the end + return nil + } + + // reorder the sub-groups + const insertAfter = false + if err := s.ReorderSubGroups(ctx, groupID, ids, *insertPointID, insertAfter); err != nil { + return err + } + } + + return nil +} + +func (s *groupRelationshipStore) RemoveSubGroups(ctx context.Context, groupID int, subGroupIDs []int) error { + const idIsContaining = true + return s.destroyJoins(ctx, groupID, subGroupIDs, idIsContaining) +} + +func (s *groupRelationshipStore) ReorderSubGroups(ctx context.Context, groupID int, subGroupIDs []int, insertPointID int, insertAfter bool) error { + for _, id := range subGroupIDs { + if err := s.reorderSubGroup(ctx, groupID, id, insertPointID, insertAfter); err != nil { + return err + } + } + + return nil +} diff --git a/pkg/sqlite/group_test.go b/pkg/sqlite/group_test.go new file mode 100644 index 00000000000..1d3637c8611 --- /dev/null +++ b/pkg/sqlite/group_test.go @@ -0,0 
+1,1898 @@ +//go:build integration +// +build integration + +package sqlite_test + +import ( + "context" + "fmt" + "strconv" + "strings" + "testing" + "time" + + "github.com/stretchr/testify/assert" + + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/sliceutil" + "github.com/stashapp/stash/pkg/sliceutil/intslice" +) + +func loadGroupRelationships(ctx context.Context, expected models.Group, actual *models.Group) error { + if expected.URLs.Loaded() { + if err := actual.LoadURLs(ctx, db.Group); err != nil { + return err + } + } + if expected.TagIDs.Loaded() { + if err := actual.LoadTagIDs(ctx, db.Group); err != nil { + return err + } + } + if expected.ContainingGroups.Loaded() { + if err := actual.LoadContainingGroupIDs(ctx, db.Group); err != nil { + return err + } + } + if expected.SubGroups.Loaded() { + if err := actual.LoadSubGroupIDs(ctx, db.Group); err != nil { + return err + } + } + + return nil +} + +func Test_GroupStore_Create(t *testing.T) { + var ( + name = "name" + url = "url" + aliases = "alias1, alias2" + director = "director" + rating = 60 + duration = 34 + synopsis = "synopsis" + date, _ = models.ParseDate("2003-02-01") + containingGroupDescription = "containingGroupDescription" + subGroupDescription = "subGroupDescription" + createdAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC) + updatedAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC) + ) + + tests := []struct { + name string + newObject models.Group + wantErr bool + }{ + { + "full", + models.Group{ + Name: name, + Duration: &duration, + Date: &date, + Rating: &rating, + StudioID: &studioIDs[studioIdxWithGroup], + Director: director, + Synopsis: synopsis, + URLs: models.NewRelatedStrings([]string{url}), + TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithDupName], tagIDs[tagIdx1WithGroup]}), + ContainingGroups: models.NewRelatedGroupDescriptions([]models.GroupIDDescription{ + {GroupID: groupIDs[groupIdxWithScene], Description: containingGroupDescription}, + }), + SubGroups: 
models.NewRelatedGroupDescriptions([]models.GroupIDDescription{ + {GroupID: groupIDs[groupIdxWithStudio], Description: subGroupDescription}, + }), + Aliases: aliases, + CreatedAt: createdAt, + UpdatedAt: updatedAt, + }, + false, + }, + { + "invalid tag id", + models.Group{ + Name: name, + TagIDs: models.NewRelatedIDs([]int{invalidID}), + }, + true, + }, + { + "invalid containing group id", + models.Group{ + Name: name, + ContainingGroups: models.NewRelatedGroupDescriptions([]models.GroupIDDescription{{GroupID: invalidID}}), + }, + true, + }, + { + "invalid sub group id", + models.Group{ + Name: name, + SubGroups: models.NewRelatedGroupDescriptions([]models.GroupIDDescription{{GroupID: invalidID}}), + }, + true, + }, + } + + qb := db.Group + + for _, tt := range tests { + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + assert := assert.New(t) + + p := tt.newObject + if err := qb.Create(ctx, &p); (err != nil) != tt.wantErr { + t.Errorf("GroupStore.Create() error = %v, wantErr = %v", err, tt.wantErr) + } + + if tt.wantErr { + assert.Zero(p.ID) + return + } + + assert.NotZero(p.ID) + + copy := tt.newObject + copy.ID = p.ID + + // load relationships + if err := loadGroupRelationships(ctx, copy, &p); err != nil { + t.Errorf("loadGroupRelationships() error = %v", err) + return + } + + assert.Equal(copy, p) + + // ensure can find the group + found, err := qb.Find(ctx, p.ID) + if err != nil { + t.Errorf("GroupStore.Find() error = %v", err) + } + + if !assert.NotNil(found) { + return + } + + // load relationships + if err := loadGroupRelationships(ctx, copy, found); err != nil { + t.Errorf("loadGroupRelationships() error = %v", err) + return + } + assert.Equal(copy, *found) + + return + }) + } +} + +func Test_groupQueryBuilder_Update(t *testing.T) { + var ( + name = "name" + url = "url" + aliases = "alias1, alias2" + director = "director" + rating = 60 + duration = 34 + synopsis = "synopsis" + date, _ = models.ParseDate("2003-02-01") + 
containingGroupDescription = "containingGroupDescription" + subGroupDescription = "subGroupDescription" + createdAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC) + updatedAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC) + ) + + tests := []struct { + name string + updatedObject models.Group + wantErr bool + }{ + { + "full", + models.Group{ + ID: groupIDs[groupIdxWithTag], + Name: name, + Duration: &duration, + Date: &date, + Rating: &rating, + StudioID: &studioIDs[studioIdxWithGroup], + Director: director, + Synopsis: synopsis, + URLs: models.NewRelatedStrings([]string{url}), + TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithDupName], tagIDs[tagIdx1WithGroup]}), + ContainingGroups: models.NewRelatedGroupDescriptions([]models.GroupIDDescription{ + {GroupID: groupIDs[groupIdxWithScene], Description: containingGroupDescription}, + }), + SubGroups: models.NewRelatedGroupDescriptions([]models.GroupIDDescription{ + {GroupID: groupIDs[groupIdxWithStudio], Description: subGroupDescription}, + }), + Aliases: aliases, + CreatedAt: createdAt, + UpdatedAt: updatedAt, + }, + false, + }, + { + "clear tag ids", + models.Group{ + ID: groupIDs[groupIdxWithTag], + Name: name, + TagIDs: models.NewRelatedIDs([]int{}), + }, + false, + }, + { + "clear containing ids", + models.Group{ + ID: groupIDs[groupIdxWithParent], + Name: name, + ContainingGroups: models.NewRelatedGroupDescriptions([]models.GroupIDDescription{}), + }, + false, + }, + { + "clear sub ids", + models.Group{ + ID: groupIDs[groupIdxWithChild], + Name: name, + SubGroups: models.NewRelatedGroupDescriptions([]models.GroupIDDescription{}), + }, + false, + }, + { + "invalid studio id", + models.Group{ + ID: groupIDs[groupIdxWithScene], + Name: name, + StudioID: &invalidID, + }, + true, + }, + { + "invalid tag id", + models.Group{ + ID: groupIDs[groupIdxWithScene], + Name: name, + TagIDs: models.NewRelatedIDs([]int{invalidID}), + }, + true, + }, + { + "invalid containing group id", + models.Group{ + ID: 
groupIDs[groupIdxWithScene], + Name: name, + ContainingGroups: models.NewRelatedGroupDescriptions([]models.GroupIDDescription{{GroupID: invalidID}}), + }, + true, + }, + { + "invalid sub group id", + models.Group{ + ID: groupIDs[groupIdxWithScene], + Name: name, + SubGroups: models.NewRelatedGroupDescriptions([]models.GroupIDDescription{{GroupID: invalidID}}), + }, + true, + }, + } + + qb := db.Group + for _, tt := range tests { + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + assert := assert.New(t) + + actual := tt.updatedObject + expected := tt.updatedObject + + if err := qb.Update(ctx, &actual); (err != nil) != tt.wantErr { + t.Errorf("groupQueryBuilder.Update() error = %v, wantErr %v", err, tt.wantErr) + } + + if tt.wantErr { + return + } + + s, err := qb.Find(ctx, actual.ID) + if err != nil { + t.Errorf("groupQueryBuilder.Find() error = %v", err) + } + + // load relationships + if err := loadGroupRelationships(ctx, expected, s); err != nil { + t.Errorf("loadGroupRelationships() error = %v", err) + return + } + + assert.Equal(expected, *s) + }) + } +} + +var clearGroupPartial = models.GroupPartial{ + // leave mandatory fields + Aliases: models.OptionalString{Set: true, Null: true}, + Synopsis: models.OptionalString{Set: true, Null: true}, + Director: models.OptionalString{Set: true, Null: true}, + Duration: models.OptionalInt{Set: true, Null: true}, + URLs: &models.UpdateStrings{Mode: models.RelationshipUpdateModeSet}, + Date: models.OptionalDate{Set: true, Null: true}, + Rating: models.OptionalInt{Set: true, Null: true}, + StudioID: models.OptionalInt{Set: true, Null: true}, + TagIDs: &models.UpdateIDs{Mode: models.RelationshipUpdateModeSet}, + ContainingGroups: &models.UpdateGroupDescriptions{Mode: models.RelationshipUpdateModeSet}, + SubGroups: &models.UpdateGroupDescriptions{Mode: models.RelationshipUpdateModeSet}, +} + +func emptyGroup(idx int) models.Group { + return models.Group{ + ID: groupIDs[idx], + Name: groupNames[idx], 
+ TagIDs: models.NewRelatedIDs([]int{}), + ContainingGroups: models.NewRelatedGroupDescriptions([]models.GroupIDDescription{}), + SubGroups: models.NewRelatedGroupDescriptions([]models.GroupIDDescription{}), + } +} + +func Test_groupQueryBuilder_UpdatePartial(t *testing.T) { + var ( + name = "name" + url = "url" + aliases = "alias1, alias2" + director = "director" + rating = 60 + duration = 34 + synopsis = "synopsis" + date, _ = models.ParseDate("2003-02-01") + containingGroupDescription = "containingGroupDescription" + subGroupDescription = "subGroupDescription" + createdAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC) + updatedAt = time.Date(2001, 1, 1, 0, 0, 0, 0, time.UTC) + ) + + tests := []struct { + name string + id int + partial models.GroupPartial + want models.Group + wantErr bool + }{ + { + "full", + groupIDs[groupIdxWithScene], + models.GroupPartial{ + Name: models.NewOptionalString(name), + Director: models.NewOptionalString(director), + Synopsis: models.NewOptionalString(synopsis), + Aliases: models.NewOptionalString(aliases), + URLs: &models.UpdateStrings{ + Values: []string{url}, + Mode: models.RelationshipUpdateModeSet, + }, + Date: models.NewOptionalDate(date), + Duration: models.NewOptionalInt(duration), + Rating: models.NewOptionalInt(rating), + StudioID: models.NewOptionalInt(studioIDs[studioIdxWithGroup]), + CreatedAt: models.NewOptionalTime(createdAt), + UpdatedAt: models.NewOptionalTime(updatedAt), + TagIDs: &models.UpdateIDs{ + IDs: []int{tagIDs[tagIdx1WithGroup], tagIDs[tagIdx1WithDupName]}, + Mode: models.RelationshipUpdateModeSet, + }, + ContainingGroups: &models.UpdateGroupDescriptions{ + Groups: []models.GroupIDDescription{ + {GroupID: groupIDs[groupIdxWithStudio], Description: containingGroupDescription}, + {GroupID: groupIDs[groupIdxWithThreeTags], Description: containingGroupDescription}, + }, + Mode: models.RelationshipUpdateModeSet, + }, + SubGroups: &models.UpdateGroupDescriptions{ + Groups: []models.GroupIDDescription{ + 
{GroupID: groupIDs[groupIdxWithTag], Description: subGroupDescription}, + {GroupID: groupIDs[groupIdxWithDupName], Description: subGroupDescription}, + }, + Mode: models.RelationshipUpdateModeSet, + }, + }, + models.Group{ + ID: groupIDs[groupIdxWithScene], + Name: name, + Director: director, + Synopsis: synopsis, + Aliases: aliases, + URLs: models.NewRelatedStrings([]string{url}), + Date: &date, + Duration: &duration, + Rating: &rating, + StudioID: &studioIDs[studioIdxWithGroup], + CreatedAt: createdAt, + UpdatedAt: updatedAt, + TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithDupName], tagIDs[tagIdx1WithGroup]}), + ContainingGroups: models.NewRelatedGroupDescriptions([]models.GroupIDDescription{ + {GroupID: groupIDs[groupIdxWithStudio], Description: containingGroupDescription}, + {GroupID: groupIDs[groupIdxWithThreeTags], Description: containingGroupDescription}, + }), + SubGroups: models.NewRelatedGroupDescriptions([]models.GroupIDDescription{ + {GroupID: groupIDs[groupIdxWithTag], Description: subGroupDescription}, + {GroupID: groupIDs[groupIdxWithDupName], Description: subGroupDescription}, + }), + }, + false, + }, + { + "clear all", + groupIDs[groupIdxWithScene], + clearGroupPartial, + emptyGroup(groupIdxWithScene), + false, + }, + { + "clear tag ids", + groupIDs[groupIdxWithTag], + clearGroupPartial, + emptyGroup(groupIdxWithTag), + false, + }, + { + "clear group relationships", + groupIDs[groupIdxWithParentAndChild], + clearGroupPartial, + emptyGroup(groupIdxWithParentAndChild), + false, + }, + { + "add containing group", + groupIDs[groupIdxWithParent], + models.GroupPartial{ + ContainingGroups: &models.UpdateGroupDescriptions{ + Groups: []models.GroupIDDescription{ + {GroupID: groupIDs[groupIdxWithScene], Description: containingGroupDescription}, + }, + Mode: models.RelationshipUpdateModeAdd, + }, + }, + models.Group{ + ID: groupIDs[groupIdxWithParent], + Name: groupNames[groupIdxWithParent], + ContainingGroups: 
models.NewRelatedGroupDescriptions([]models.GroupIDDescription{ + {GroupID: groupIDs[groupIdxWithChild]}, + {GroupID: groupIDs[groupIdxWithScene], Description: containingGroupDescription}, + }), + }, + false, + }, + { + "add sub group", + groupIDs[groupIdxWithChild], + models.GroupPartial{ + SubGroups: &models.UpdateGroupDescriptions{ + Groups: []models.GroupIDDescription{ + {GroupID: groupIDs[groupIdxWithScene], Description: subGroupDescription}, + }, + Mode: models.RelationshipUpdateModeAdd, + }, + }, + models.Group{ + ID: groupIDs[groupIdxWithChild], + Name: groupNames[groupIdxWithChild], + SubGroups: models.NewRelatedGroupDescriptions([]models.GroupIDDescription{ + {GroupID: groupIDs[groupIdxWithParent]}, + {GroupID: groupIDs[groupIdxWithScene], Description: subGroupDescription}, + }), + }, + false, + }, + { + "remove containing group", + groupIDs[groupIdxWithParent], + models.GroupPartial{ + ContainingGroups: &models.UpdateGroupDescriptions{ + Groups: []models.GroupIDDescription{ + {GroupID: groupIDs[groupIdxWithChild]}, + }, + Mode: models.RelationshipUpdateModeRemove, + }, + }, + models.Group{ + ID: groupIDs[groupIdxWithParent], + Name: groupNames[groupIdxWithParent], + ContainingGroups: models.NewRelatedGroupDescriptions([]models.GroupIDDescription{}), + }, + false, + }, + { + "remove sub group", + groupIDs[groupIdxWithChild], + models.GroupPartial{ + SubGroups: &models.UpdateGroupDescriptions{ + Groups: []models.GroupIDDescription{ + {GroupID: groupIDs[groupIdxWithParent]}, + }, + Mode: models.RelationshipUpdateModeRemove, + }, + }, + models.Group{ + ID: groupIDs[groupIdxWithChild], + Name: groupNames[groupIdxWithChild], + SubGroups: models.NewRelatedGroupDescriptions([]models.GroupIDDescription{}), + }, + false, + }, + { + "invalid id", + invalidID, + models.GroupPartial{}, + models.Group{}, + true, + }, + } + for _, tt := range tests { + qb := db.Group + + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + assert := assert.New(t) 
+ + got, err := qb.UpdatePartial(ctx, tt.id, tt.partial) + if (err != nil) != tt.wantErr { + t.Errorf("groupQueryBuilder.UpdatePartial() error = %v, wantErr %v", err, tt.wantErr) + return + } + + if tt.wantErr { + return + } + + // load relationships + if err := loadGroupRelationships(ctx, tt.want, got); err != nil { + t.Errorf("loadGroupRelationships() error = %v", err) + return + } + + assert.Equal(tt.want, *got) + + s, err := qb.Find(ctx, tt.id) + if err != nil { + t.Errorf("groupQueryBuilder.Find() error = %v", err) + } + + // load relationships + if err := loadGroupRelationships(ctx, tt.want, s); err != nil { + t.Errorf("loadGroupRelationships() error = %v", err) + return + } + + assert.Equal(tt.want, *s) + }) + } +} + +func TestGroupFindByName(t *testing.T) { + withTxn(func(ctx context.Context) error { + mqb := db.Group + + name := groupNames[groupIdxWithScene] // find a group by name + + group, err := mqb.FindByName(ctx, name, false) + + if err != nil { + t.Errorf("Error finding groups: %s", err.Error()) + } + + assert.Equal(t, groupNames[groupIdxWithScene], group.Name) + + name = groupNames[groupIdxWithDupName] // find a group by name nocase + + group, err = mqb.FindByName(ctx, name, true) + + if err != nil { + t.Errorf("Error finding groups: %s", err.Error()) + } + // groupIdxWithDupName and groupIdxWithScene should have similar names ( only diff should be Name vs NaMe) + //group.Name should match with groupIdxWithScene since its ID is before moveIdxWithDupName + assert.Equal(t, groupNames[groupIdxWithScene], group.Name) + //group.Name should match with groupIdxWithDupName if the check is not case sensitive + assert.Equal(t, strings.ToLower(groupNames[groupIdxWithDupName]), strings.ToLower(group.Name)) + + return nil + }) +} + +func TestGroupFindByNames(t *testing.T) { + withTxn(func(ctx context.Context) error { + var names []string + + mqb := db.Group + + names = append(names, groupNames[groupIdxWithScene]) // find groups by names + + groups, err := 
mqb.FindByNames(ctx, names, false) + if err != nil { + t.Errorf("Error finding groups: %s", err.Error()) + } + assert.Len(t, groups, 1) + assert.Equal(t, groupNames[groupIdxWithScene], groups[0].Name) + + groups, err = mqb.FindByNames(ctx, names, true) // find groups by names nocase + if err != nil { + t.Errorf("Error finding groups: %s", err.Error()) + } + assert.Len(t, groups, 2) // groupIdxWithScene and groupIdxWithDupName + assert.Equal(t, strings.ToLower(groupNames[groupIdxWithScene]), strings.ToLower(groups[0].Name)) + assert.Equal(t, strings.ToLower(groupNames[groupIdxWithScene]), strings.ToLower(groups[1].Name)) + + return nil + }) +} + +func groupsToIDs(i []*models.Group) []int { + ret := make([]int, len(i)) + for i, v := range i { + ret[i] = v.ID + } + + return ret +} + +func TestGroupQuery(t *testing.T) { + var ( + frontImage = "front_image" + backImage = "back_image" + ) + + tests := []struct { + name string + findFilter *models.FindFilterType + filter *models.GroupFilterType + includeIdxs []int + excludeIdxs []int + wantErr bool + }{ + { + "is missing front image", + nil, + &models.GroupFilterType{ + IsMissing: &frontImage, + }, + // just ensure that it doesn't error + nil, + nil, + false, + }, + { + "is missing back image", + nil, + &models.GroupFilterType{ + IsMissing: &backImage, + }, + // just ensure that it doesn't error + nil, + nil, + false, + }, + } + + for _, tt := range tests { + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + assert := assert.New(t) + + results, _, err := db.Group.Query(ctx, tt.filter, tt.findFilter) + if (err != nil) != tt.wantErr { + t.Errorf("GroupQueryBuilder.Query() error = %v, wantErr %v", err, tt.wantErr) + return + } + + ids := groupsToIDs(results) + include := indexesToIDs(performerIDs, tt.includeIdxs) + exclude := indexesToIDs(performerIDs, tt.excludeIdxs) + + for _, i := range include { + assert.Contains(ids, i) + } + for _, e := range exclude { + assert.NotContains(ids, e) + } + }) + } 
+} + +func TestGroupQueryStudio(t *testing.T) { + withTxn(func(ctx context.Context) error { + mqb := db.Group + studioCriterion := models.HierarchicalMultiCriterionInput{ + Value: []string{ + strconv.Itoa(studioIDs[studioIdxWithGroup]), + }, + Modifier: models.CriterionModifierIncludes, + } + + groupFilter := models.GroupFilterType{ + Studios: &studioCriterion, + } + + groups, _, err := mqb.Query(ctx, &groupFilter, nil) + if err != nil { + t.Errorf("Error querying group: %s", err.Error()) + } + + assert.Len(t, groups, 1) + + // ensure id is correct + assert.Equal(t, groupIDs[groupIdxWithStudio], groups[0].ID) + + studioCriterion = models.HierarchicalMultiCriterionInput{ + Value: []string{ + strconv.Itoa(studioIDs[studioIdxWithGroup]), + }, + Modifier: models.CriterionModifierExcludes, + } + + q := getGroupStringValue(groupIdxWithStudio, titleField) + findFilter := models.FindFilterType{ + Q: &q, + } + + groups, _, err = mqb.Query(ctx, &groupFilter, &findFilter) + if err != nil { + t.Errorf("Error querying group: %s", err.Error()) + } + assert.Len(t, groups, 0) + + return nil + }) +} + +func TestGroupQueryURL(t *testing.T) { + const sceneIdx = 1 + groupURL := getGroupStringValue(sceneIdx, urlField) + + urlCriterion := models.StringCriterionInput{ + Value: groupURL, + Modifier: models.CriterionModifierEquals, + } + + filter := models.GroupFilterType{ + URL: &urlCriterion, + } + + verifyFn := func(n *models.Group) { + t.Helper() + + urls := n.URLs.List() + var url string + if len(urls) > 0 { + url = urls[0] + } + + verifyString(t, url, urlCriterion) + } + + verifyGroupQuery(t, filter, verifyFn) + + urlCriterion.Modifier = models.CriterionModifierNotEquals + verifyGroupQuery(t, filter, verifyFn) + + urlCriterion.Modifier = models.CriterionModifierMatchesRegex + urlCriterion.Value = "group_.*1_URL" + verifyGroupQuery(t, filter, verifyFn) + + urlCriterion.Modifier = models.CriterionModifierNotMatchesRegex + verifyGroupQuery(t, filter, verifyFn) + + urlCriterion.Modifier 
= models.CriterionModifierIsNull + urlCriterion.Value = "" + verifyGroupQuery(t, filter, verifyFn) + + urlCriterion.Modifier = models.CriterionModifierNotNull + verifyGroupQuery(t, filter, verifyFn) +} + +func TestGroupQueryURLExcludes(t *testing.T) { + withRollbackTxn(func(ctx context.Context) error { + mqb := db.Group + + // create group with two URLs + group := models.Group{ + Name: "TestGroupQueryURLExcludes", + URLs: models.NewRelatedStrings([]string{ + "aaa", + "bbb", + }), + } + + err := mqb.Create(ctx, &group) + + if err != nil { + return fmt.Errorf("Error creating group: %w", err) + } + + // query for groups that exclude the URL "aaa" + urlCriterion := models.StringCriterionInput{ + Value: "aaa", + Modifier: models.CriterionModifierExcludes, + } + + nameCriterion := models.StringCriterionInput{ + Value: group.Name, + Modifier: models.CriterionModifierEquals, + } + + filter := models.GroupFilterType{ + URL: &urlCriterion, + Name: &nameCriterion, + } + + groups := queryGroups(ctx, t, &filter, nil) + assert.Len(t, groups, 0, "Expected no groups to be found") + + // query for groups that exclude the URL "ccc" + urlCriterion.Value = "ccc" + groups = queryGroups(ctx, t, &filter, nil) + + if assert.Len(t, groups, 1, "Expected one group to be found") { + assert.Equal(t, group.Name, groups[0].Name) + } + + return nil + }) +} + +func verifyGroupQuery(t *testing.T, filter models.GroupFilterType, verifyFn func(s *models.Group)) { + withTxn(func(ctx context.Context) error { + t.Helper() + sqb := db.Group + + groups := queryGroups(ctx, t, &filter, nil) + + for _, group := range groups { + if err := group.LoadURLs(ctx, sqb); err != nil { + t.Errorf("Error loading group relationships: %v", err) + } + } + + // assume it should find at least one + assert.Greater(t, len(groups), 0) + + for _, m := range groups { + verifyFn(m) + } + + return nil + }) +} + +func queryGroups(ctx context.Context, t *testing.T, groupFilter *models.GroupFilterType, findFilter 
*models.FindFilterType) []*models.Group { + sqb := db.Group + groups, _, err := sqb.Query(ctx, groupFilter, findFilter) + if err != nil { + t.Errorf("Error querying group: %s", err.Error()) + } + + return groups +} + +func TestGroupQueryTags(t *testing.T) { + withTxn(func(ctx context.Context) error { + tagCriterion := models.HierarchicalMultiCriterionInput{ + Value: []string{ + strconv.Itoa(tagIDs[tagIdxWithGroup]), + strconv.Itoa(tagIDs[tagIdx1WithGroup]), + }, + Modifier: models.CriterionModifierIncludes, + } + + groupFilter := models.GroupFilterType{ + Tags: &tagCriterion, + } + + // ensure ids are correct + groups := queryGroups(ctx, t, &groupFilter, nil) + assert.Len(t, groups, 3) + for _, group := range groups { + assert.True(t, group.ID == groupIDs[groupIdxWithTag] || group.ID == groupIDs[groupIdxWithTwoTags] || group.ID == groupIDs[groupIdxWithThreeTags]) + } + + tagCriterion = models.HierarchicalMultiCriterionInput{ + Value: []string{ + strconv.Itoa(tagIDs[tagIdx1WithGroup]), + strconv.Itoa(tagIDs[tagIdx2WithGroup]), + }, + Modifier: models.CriterionModifierIncludesAll, + } + + groups = queryGroups(ctx, t, &groupFilter, nil) + + if assert.Len(t, groups, 2) { + assert.Equal(t, sceneIDs[groupIdxWithTwoTags], groups[0].ID) + assert.Equal(t, sceneIDs[groupIdxWithThreeTags], groups[1].ID) + } + + tagCriterion = models.HierarchicalMultiCriterionInput{ + Value: []string{ + strconv.Itoa(tagIDs[tagIdx1WithGroup]), + }, + Modifier: models.CriterionModifierExcludes, + } + + q := getSceneStringValue(groupIdxWithTwoTags, titleField) + findFilter := models.FindFilterType{ + Q: &q, + } + + groups = queryGroups(ctx, t, &groupFilter, &findFilter) + assert.Len(t, groups, 0) + + return nil + }) +} + +func TestGroupQueryTagCount(t *testing.T) { + const tagCount = 1 + tagCountCriterion := models.IntCriterionInput{ + Value: tagCount, + Modifier: models.CriterionModifierEquals, + } + + verifyGroupsTagCount(t, tagCountCriterion) + + tagCountCriterion.Modifier = 
models.CriterionModifierNotEquals + verifyGroupsTagCount(t, tagCountCriterion) + + tagCountCriterion.Modifier = models.CriterionModifierGreaterThan + verifyGroupsTagCount(t, tagCountCriterion) + + tagCountCriterion.Modifier = models.CriterionModifierLessThan + verifyGroupsTagCount(t, tagCountCriterion) +} + +func verifyGroupsTagCount(t *testing.T, tagCountCriterion models.IntCriterionInput) { + withTxn(func(ctx context.Context) error { + sqb := db.Group + groupFilter := models.GroupFilterType{ + TagCount: &tagCountCriterion, + } + + groups := queryGroups(ctx, t, &groupFilter, nil) + assert.Greater(t, len(groups), 0) + + for _, group := range groups { + ids, err := sqb.GetTagIDs(ctx, group.ID) + if err != nil { + return err + } + verifyInt(t, len(ids), tagCountCriterion) + } + + return nil + }) +} + +func TestGroupQuerySorting(t *testing.T) { + sort := "scenes_count" + direction := models.SortDirectionEnumDesc + findFilter := models.FindFilterType{ + Sort: &sort, + Direction: &direction, + } + + withTxn(func(ctx context.Context) error { + groups := queryGroups(ctx, t, nil, &findFilter) + + // scenes should be in same order as indexes + firstGroup := groups[0] + + assert.Equal(t, groupIDs[groupIdxWithScene], firstGroup.ID) + + // sort in descending order + direction = models.SortDirectionEnumAsc + + groups = queryGroups(ctx, t, nil, &findFilter) + lastGroup := groups[len(groups)-1] + + assert.Equal(t, groupIDs[groupIdxWithParentAndScene], lastGroup.ID) + + return nil + }) +} + +func TestGroupQuerySortOrderIndex(t *testing.T) { + sort := "sub_group_order" + direction := models.SortDirectionEnumDesc + findFilter := models.FindFilterType{ + Sort: &sort, + Direction: &direction, + } + + groupFilter := models.GroupFilterType{ + ContainingGroups: &models.HierarchicalMultiCriterionInput{ + Value: intslice.IntSliceToStringSlice([]int{groupIdxWithChild}), + Modifier: models.CriterionModifierIncludes, + }, + } + + withTxn(func(ctx context.Context) error { + // just ensure 
there are no errors + _, _, err := db.Group.Query(ctx, &groupFilter, &findFilter) + if err != nil { + t.Errorf("Error querying group: %s", err.Error()) + } + + _, _, err = db.Group.Query(ctx, nil, &findFilter) + if err != nil { + t.Errorf("Error querying group: %s", err.Error()) + } + + return nil + }) +} + +func TestGroupUpdateFrontImage(t *testing.T) { + if err := withRollbackTxn(func(ctx context.Context) error { + qb := db.Group + + // create group to test against + const name = "TestGroupUpdateGroupImages" + group := models.Group{ + Name: name, + } + err := qb.Create(ctx, &group) + if err != nil { + return fmt.Errorf("Error creating group: %s", err.Error()) + } + + return testUpdateImage(t, ctx, group.ID, qb.UpdateFrontImage, qb.GetFrontImage) + }); err != nil { + t.Error(err.Error()) + } +} + +func TestGroupUpdateBackImage(t *testing.T) { + if err := withRollbackTxn(func(ctx context.Context) error { + qb := db.Group + + // create group to test against + const name = "TestGroupUpdateGroupImages" + group := models.Group{ + Name: name, + } + err := qb.Create(ctx, &group) + if err != nil { + return fmt.Errorf("Error creating group: %s", err.Error()) + } + + return testUpdateImage(t, ctx, group.ID, qb.UpdateBackImage, qb.GetBackImage) + }); err != nil { + t.Error(err.Error()) + } +} + +func TestGroupQueryContainingGroups(t *testing.T) { + const nameField = "Name" + + type criterion struct { + valueIdxs []int + modifier models.CriterionModifier + depth int + } + + tests := []struct { + name string + c criterion + q string + includeIdxs []int + }{ + { + "includes", + criterion{ + []int{groupIdxWithChild}, + models.CriterionModifierIncludes, + 0, + }, + "", + []int{groupIdxWithParent}, + }, + { + "excludes", + criterion{ + []int{groupIdxWithChild}, + models.CriterionModifierExcludes, + 0, + }, + getGroupStringValue(groupIdxWithParent, nameField), + nil, + }, + { + "includes (all levels)", + criterion{ + []int{groupIdxWithGrandChild}, + 
models.CriterionModifierIncludes, + -1, + }, + "", + []int{groupIdxWithParentAndChild, groupIdxWithGrandParent}, + }, + { + "includes (1 level)", + criterion{ + []int{groupIdxWithGrandChild}, + models.CriterionModifierIncludes, + 1, + }, + "", + []int{groupIdxWithParentAndChild, groupIdxWithGrandParent}, + }, + { + "is null", + criterion{ + nil, + models.CriterionModifierIsNull, + 0, + }, + getGroupStringValue(groupIdxWithParent, nameField), + nil, + }, + { + "not null", + criterion{ + nil, + models.CriterionModifierNotNull, + 0, + }, + "", + []int{groupIdxWithParentAndChild, groupIdxWithParent, groupIdxWithGrandParent, groupIdxWithParentAndScene}, + }, + } + + qb := db.Group + + for _, tt := range tests { + valueIDs := indexesToIDs(groupIDs, tt.c.valueIdxs) + expectedIDs := indexesToIDs(groupIDs, tt.includeIdxs) + + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + groupFilter := &models.GroupFilterType{ + ContainingGroups: &models.HierarchicalMultiCriterionInput{ + Value: intslice.IntSliceToStringSlice(valueIDs), + Modifier: tt.c.modifier, + }, + } + + if tt.c.depth != 0 { + groupFilter.ContainingGroups.Depth = &tt.c.depth + } + + findFilter := models.FindFilterType{} + if tt.q != "" { + findFilter.Q = &tt.q + } + + groups, _, err := qb.Query(ctx, groupFilter, &findFilter) + if err != nil { + t.Errorf("GroupStore.Query() error = %v", err) + return + } + + // get ids of groups + groupIDs := sliceutil.Map(groups, func(g *models.Group) int { return g.ID }) + assert.ElementsMatch(t, expectedIDs, groupIDs) + }) + } +} + +func TestGroupQuerySubGroups(t *testing.T) { + const nameField = "Name" + + type criterion struct { + valueIdxs []int + modifier models.CriterionModifier + depth int + } + + tests := []struct { + name string + c criterion + q string + expectedIdxs []int + }{ + { + "includes", + criterion{ + []int{groupIdxWithParent}, + models.CriterionModifierIncludes, + 0, + }, + "", + []int{groupIdxWithChild}, + }, + { + "excludes", + 
criterion{ + []int{groupIdxWithParent}, + models.CriterionModifierExcludes, + 0, + }, + getGroupStringValue(groupIdxWithChild, nameField), + nil, + }, + { + "includes (all levels)", + criterion{ + []int{groupIdxWithGrandParent}, + models.CriterionModifierIncludes, + -1, + }, + "", + []int{groupIdxWithGrandChild, groupIdxWithParentAndChild}, + }, + { + "includes (1 level)", + criterion{ + []int{groupIdxWithGrandParent}, + models.CriterionModifierIncludes, + 1, + }, + "", + []int{groupIdxWithGrandChild, groupIdxWithParentAndChild}, + }, + { + "is null", + criterion{ + nil, + models.CriterionModifierIsNull, + 0, + }, + getGroupStringValue(groupIdxWithChild, nameField), + nil, + }, + { + "not null", + criterion{ + nil, + models.CriterionModifierNotNull, + 0, + }, + "", + []int{groupIdxWithGrandChild, groupIdxWithChild, groupIdxWithParentAndChild, groupIdxWithChildWithScene}, + }, + } + + qb := db.Group + + for _, tt := range tests { + valueIDs := indexesToIDs(groupIDs, tt.c.valueIdxs) + expectedIDs := indexesToIDs(groupIDs, tt.expectedIdxs) + + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + groupFilter := &models.GroupFilterType{ + SubGroups: &models.HierarchicalMultiCriterionInput{ + Value: intslice.IntSliceToStringSlice(valueIDs), + Modifier: tt.c.modifier, + }, + } + + if tt.c.depth != 0 { + groupFilter.SubGroups.Depth = &tt.c.depth + } + + findFilter := models.FindFilterType{} + if tt.q != "" { + findFilter.Q = &tt.q + } + + groups, _, err := qb.Query(ctx, groupFilter, &findFilter) + if err != nil { + t.Errorf("GroupStore.Query() error = %v", err) + return + } + + // get ids of groups + groupIDs := sliceutil.Map(groups, func(g *models.Group) int { return g.ID }) + assert.ElementsMatch(t, expectedIDs, groupIDs) + }) + } +} + +func TestGroupQueryContainingGroupCount(t *testing.T) { + const nameField = "Name" + + tests := []struct { + name string + value int + modifier models.CriterionModifier + q string + expectedIdxs []int + }{ + { + 
"equals", + 1, + models.CriterionModifierEquals, + "", + []int{groupIdxWithParent, groupIdxWithGrandParent, groupIdxWithParentAndChild, groupIdxWithParentAndScene}, + }, + { + "not equals", + 1, + models.CriterionModifierNotEquals, + getGroupStringValue(groupIdxWithParent, nameField), + nil, + }, + { + "less than", + 1, + models.CriterionModifierLessThan, + getGroupStringValue(groupIdxWithParent, nameField), + nil, + }, + { + "greater than", + 0, + models.CriterionModifierGreaterThan, + "", + []int{groupIdxWithParent, groupIdxWithGrandParent, groupIdxWithParentAndChild, groupIdxWithParentAndScene}, + }, + } + + qb := db.Group + + for _, tt := range tests { + expectedIDs := indexesToIDs(groupIDs, tt.expectedIdxs) + + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + groupFilter := &models.GroupFilterType{ + ContainingGroupCount: &models.IntCriterionInput{ + Value: tt.value, + Modifier: tt.modifier, + }, + } + + findFilter := models.FindFilterType{} + if tt.q != "" { + findFilter.Q = &tt.q + } + + groups, _, err := qb.Query(ctx, groupFilter, &findFilter) + if err != nil { + t.Errorf("GroupStore.Query() error = %v", err) + return + } + + // get ids of groups + groupIDs := sliceutil.Map(groups, func(g *models.Group) int { return g.ID }) + assert.ElementsMatch(t, expectedIDs, groupIDs) + }) + } +} + +func TestGroupQuerySubGroupCount(t *testing.T) { + const nameField = "Name" + + tests := []struct { + name string + value int + modifier models.CriterionModifier + q string + expectedIdxs []int + }{ + { + "equals", + 1, + models.CriterionModifierEquals, + "", + []int{groupIdxWithChild, groupIdxWithGrandChild, groupIdxWithParentAndChild, groupIdxWithChildWithScene}, + }, + { + "not equals", + 1, + models.CriterionModifierNotEquals, + getGroupStringValue(groupIdxWithChild, nameField), + nil, + }, + { + "less than", + 1, + models.CriterionModifierLessThan, + getGroupStringValue(groupIdxWithChild, nameField), + nil, + }, + { + "greater than", + 0, + 
models.CriterionModifierGreaterThan, + "", + []int{groupIdxWithChild, groupIdxWithGrandChild, groupIdxWithParentAndChild, groupIdxWithChildWithScene}, + }, + } + + qb := db.Group + + for _, tt := range tests { + expectedIDs := indexesToIDs(groupIDs, tt.expectedIdxs) + + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + groupFilter := &models.GroupFilterType{ + SubGroupCount: &models.IntCriterionInput{ + Value: tt.value, + Modifier: tt.modifier, + }, + } + + findFilter := models.FindFilterType{} + if tt.q != "" { + findFilter.Q = &tt.q + } + + groups, _, err := qb.Query(ctx, groupFilter, &findFilter) + if err != nil { + t.Errorf("GroupStore.Query() error = %v", err) + return + } + + // get ids of groups + groupIDs := sliceutil.Map(groups, func(g *models.Group) int { return g.ID }) + assert.ElementsMatch(t, expectedIDs, groupIDs) + }) + } +} + +func TestGroupFindInAncestors(t *testing.T) { + tests := []struct { + name string + ancestorIdxs []int + idxs []int + expectedIdxs []int + }{ + { + "basic", + []int{groupIdxWithGrandParent}, + []int{groupIdxWithGrandChild}, + []int{groupIdxWithGrandChild}, + }, + { + "same", + []int{groupIdxWithScene}, + []int{groupIdxWithScene}, + []int{groupIdxWithScene}, + }, + { + "no matches", + []int{groupIdxWithGrandParent}, + []int{groupIdxWithScene}, + nil, + }, + } + + qb := db.Group + + for _, tt := range tests { + ancestorIDs := indexesToIDs(groupIDs, tt.ancestorIdxs) + ids := indexesToIDs(groupIDs, tt.idxs) + expectedIDs := indexesToIDs(groupIDs, tt.expectedIdxs) + + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + found, err := qb.FindInAncestors(ctx, ancestorIDs, ids) + if err != nil { + t.Errorf("GroupStore.FindInAncestors() error = %v", err) + return + } + + // get ids of groups + assert.ElementsMatch(t, found, expectedIDs) + }) + } +} + +func TestGroupReorderSubGroups(t *testing.T) { + tests := []struct { + name string + subGroupLen int + idxsToMove []int + insertLoc int + 
insertAfter bool + // order of elements, using original indexes + expectedIdxs []int + }{ + { + "move single back before", + 5, + []int{2}, + 1, + false, + []int{0, 2, 1, 3, 4}, + }, + { + "move single forward before", + 5, + []int{2}, + 4, + false, + []int{0, 1, 3, 2, 4}, + }, + { + "move multiple back before", + 5, + []int{3, 2, 4}, + 0, + false, + []int{3, 2, 4, 0, 1}, + }, + { + "move multiple forward before", + 5, + []int{2, 1, 0}, + 4, + false, + []int{3, 2, 1, 0, 4}, + }, + { + "move single back after", + 5, + []int{2}, + 0, + true, + []int{0, 2, 1, 3, 4}, + }, + { + "move single forward after", + 5, + []int{2}, + 4, + true, + []int{0, 1, 3, 4, 2}, + }, + { + "move multiple back after", + 5, + []int{3, 2, 4}, + 0, + false, + []int{0, 3, 2, 4, 1}, + }, + { + "move multiple forward after", + 5, + []int{2, 1, 0}, + 4, + false, + []int{3, 4, 2, 1, 0}, + }, + } + + qb := db.Group + + for _, tt := range tests { + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + // create the group + group := models.Group{ + Name: "TestGroupReorderSubGroups", + } + + if err := qb.Create(ctx, &group); err != nil { + t.Errorf("GroupStore.Create() error = %v", err) + return + } + + // and sub-groups + idxToId := make([]int, tt.subGroupLen) + + for i := 0; i < tt.subGroupLen; i++ { + subGroup := models.Group{ + Name: fmt.Sprintf("SubGroup %d", i), + ContainingGroups: models.NewRelatedGroupDescriptions([]models.GroupIDDescription{ + {GroupID: group.ID}, + }), + } + + if err := qb.Create(ctx, &subGroup); err != nil { + t.Errorf("GroupStore.Create() error = %v", err) + return + } + + idxToId[i] = subGroup.ID + } + + // reorder + idsToMove := indexesToIDs(idxToId, tt.idxsToMove) + insertID := idxToId[tt.insertLoc] + if err := qb.ReorderSubGroups(ctx, group.ID, idsToMove, insertID, tt.insertAfter); err != nil { + t.Errorf("GroupStore.ReorderSubGroups() error = %v", err) + return + } + + // validate the new order + gd, err := qb.GetSubGroupDescriptions(ctx, 
group.ID) + if err != nil { + t.Errorf("GroupStore.GetSubGroupDescriptions() error = %v", err) + return + } + + // get ids of groups + newIDs := sliceutil.Map(gd, func(gd models.GroupIDDescription) int { return gd.GroupID }) + newIdxs := sliceutil.Map(newIDs, func(id int) int { return sliceutil.Index(idxToId, id) }) + + assert.ElementsMatch(t, tt.expectedIdxs, newIdxs) + }) + } +} + +func TestGroupAddSubGroups(t *testing.T) { + tests := []struct { + name string + existingSubGroupLen int + insertGroupsLen int + insertLoc int + // order of elements, using original indexes + expectedIdxs []int + }{ + { + "append single", + 4, + 1, + 999, + []int{0, 1, 2, 3, 4}, + }, + { + "insert single middle", + 4, + 1, + 2, + []int{0, 1, 4, 2, 3}, + }, + { + "insert single start", + 4, + 1, + 0, + []int{4, 0, 1, 2, 3}, + }, + { + "append multiple", + 4, + 2, + 999, + []int{0, 1, 2, 3, 4, 5}, + }, + { + "insert multiple middle", + 4, + 2, + 2, + []int{0, 1, 4, 5, 2, 3}, + }, + { + "insert multiple start", + 4, + 2, + 0, + []int{4, 5, 0, 1, 2, 3}, + }, + } + + qb := db.Group + + for _, tt := range tests { + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + // create the group + group := models.Group{ + Name: "TestGroupReorderSubGroups", + } + + if err := qb.Create(ctx, &group); err != nil { + t.Errorf("GroupStore.Create() error = %v", err) + return + } + + // and sub-groups + idxToId := make([]int, tt.existingSubGroupLen+tt.insertGroupsLen) + + for i := 0; i < tt.existingSubGroupLen; i++ { + subGroup := models.Group{ + Name: fmt.Sprintf("Existing SubGroup %d", i), + ContainingGroups: models.NewRelatedGroupDescriptions([]models.GroupIDDescription{ + {GroupID: group.ID}, + }), + } + + if err := qb.Create(ctx, &subGroup); err != nil { + t.Errorf("GroupStore.Create() error = %v", err) + return + } + + idxToId[i] = subGroup.ID + } + + // and sub-groups to insert + for i := 0; i < tt.insertGroupsLen; i++ { + subGroup := models.Group{ + Name: fmt.Sprintf("Inserted 
SubGroup %d", i), + } + + if err := qb.Create(ctx, &subGroup); err != nil { + t.Errorf("GroupStore.Create() error = %v", err) + return + } + + idxToId[i+tt.existingSubGroupLen] = subGroup.ID + } + + // convert ids to description + idDescriptions := make([]models.GroupIDDescription, tt.insertGroupsLen) + for i, id := range idxToId[tt.existingSubGroupLen:] { + idDescriptions[i] = models.GroupIDDescription{GroupID: id} + } + + // add + if err := qb.AddSubGroups(ctx, group.ID, idDescriptions, &tt.insertLoc); err != nil { + t.Errorf("GroupStore.AddSubGroups() error = %v", err) + return + } + + // validate the new order + gd, err := qb.GetSubGroupDescriptions(ctx, group.ID) + if err != nil { + t.Errorf("GroupStore.GetSubGroupDescriptions() error = %v", err) + return + } + + // get ids of groups + newIDs := sliceutil.Map(gd, func(gd models.GroupIDDescription) int { return gd.GroupID }) + newIdxs := sliceutil.Map(newIDs, func(id int) int { return sliceutil.Index(idxToId, id) }) + + assert.ElementsMatch(t, tt.expectedIdxs, newIdxs) + }) + } +} + +func TestGroupRemoveSubGroups(t *testing.T) { + tests := []struct { + name string + subGroupLen int + removeIdxs []int + // order of elements, using original indexes + expectedIdxs []int + }{ + { + "remove last", + 4, + []int{3}, + []int{0, 1, 2}, + }, + { + "remove first", + 4, + []int{0}, + []int{1, 2, 3}, + }, + { + "remove middle", + 4, + []int{2}, + []int{0, 1, 3}, + }, + { + "remove multiple", + 4, + []int{1, 3}, + []int{0, 2}, + }, + { + "remove all", + 4, + []int{0, 1, 2, 3}, + []int{}, + }, + } + + qb := db.Group + + for _, tt := range tests { + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + // create the group + group := models.Group{ + Name: "TestGroupReorderSubGroups", + } + + if err := qb.Create(ctx, &group); err != nil { + t.Errorf("GroupStore.Create() error = %v", err) + return + } + + // and sub-groups + idxToId := make([]int, tt.subGroupLen) + + for i := 0; i < tt.subGroupLen; i++ { + 
subGroup := models.Group{ + Name: fmt.Sprintf("Existing SubGroup %d", i), + ContainingGroups: models.NewRelatedGroupDescriptions([]models.GroupIDDescription{ + {GroupID: group.ID}, + }), + } + + if err := qb.Create(ctx, &subGroup); err != nil { + t.Errorf("GroupStore.Create() error = %v", err) + return + } + + idxToId[i] = subGroup.ID + } + + idsToRemove := indexesToIDs(idxToId, tt.removeIdxs) + if err := qb.RemoveSubGroups(ctx, group.ID, idsToRemove); err != nil { + t.Errorf("GroupStore.RemoveSubGroups() error = %v", err) + return + } + + // validate the new order + gd, err := qb.GetSubGroupDescriptions(ctx, group.ID) + if err != nil { + t.Errorf("GroupStore.GetSubGroupDescriptions() error = %v", err) + return + } + + // get ids of groups + newIDs := sliceutil.Map(gd, func(gd models.GroupIDDescription) int { return gd.GroupID }) + newIdxs := sliceutil.Map(newIDs, func(id int) int { return sliceutil.Index(idxToId, id) }) + + assert.ElementsMatch(t, tt.expectedIdxs, newIdxs) + }) + } +} + +func TestGroupFindSubGroupIDs(t *testing.T) { + tests := []struct { + name string + containingGroupIdx int + subIdxs []int + expectedIdxs []int + }{ + { + "overlap", + groupIdxWithGrandChild, + []int{groupIdxWithParentAndChild, groupIdxWithGrandParent}, + []int{groupIdxWithParentAndChild}, + }, + { + "non-overlap", + groupIdxWithGrandChild, + []int{groupIdxWithGrandParent}, + []int{}, + }, + { + "none", + groupIdxWithScene, + []int{groupIdxWithDupName}, + []int{}, + }, + { + "invalid", + invalidID, + []int{invalidID}, + []int{}, + }, + } + + qb := db.Group + + for _, tt := range tests { + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + subIDs := indexesToIDs(groupIDs, tt.subIdxs) + + id := indexToID(groupIDs, tt.containingGroupIdx) + + found, err := qb.FindSubGroupIDs(ctx, id, subIDs) + if err != nil { + t.Errorf("GroupStore.FindSubGroupIDs() error = %v", err) + return + } + + // get ids of groups + foundIdxs := sliceutil.Map(found, func(id int) int { 
return sliceutil.Index(groupIDs, id) }) + + assert.ElementsMatch(t, tt.expectedIdxs, foundIdxs) + }) + } +} + +// TODO Update +// TODO Destroy - ensure image is destroyed +// TODO Find +// TODO Count +// TODO All +// TODO Query diff --git a/pkg/sqlite/image.go b/pkg/sqlite/image.go index 02cd09ec731..8248427a8eb 100644 --- a/pkg/sqlite/image.go +++ b/pkg/sqlite/image.go @@ -112,24 +112,87 @@ func (r *imageRowRecord) fromPartial(i models.ImagePartial) { r.setTimestamp("updated_at", i.UpdatedAt) } -type ImageStore struct { +type imageRepositoryType struct { repository + performers joinRepository + galleries joinRepository + tags joinRepository + files filesRepository +} - tableMgr *table - oCounterManager +func (r *imageRepositoryType) addImagesFilesTable(f *filterBuilder) { + f.addLeftJoin(imagesFilesTable, "", "images_files.image_id = images.id") +} - fileStore *FileStore +func (r *imageRepositoryType) addFilesTable(f *filterBuilder) { + r.addImagesFilesTable(f) + f.addLeftJoin(fileTable, "", "images_files.file_id = files.id") } -func NewImageStore(fileStore *FileStore) *ImageStore { - return &ImageStore{ +func (r *imageRepositoryType) addFoldersTable(f *filterBuilder) { + r.addFilesTable(f) + f.addLeftJoin(folderTable, "", "files.parent_folder_id = folders.id") +} + +func (r *imageRepositoryType) addImageFilesTable(f *filterBuilder) { + r.addImagesFilesTable(f) + f.addLeftJoin(imageFileTable, "", "image_files.file_id = images_files.file_id") +} + +var ( + imageRepository = imageRepositoryType{ repository: repository{ tableName: imageTable, idColumn: idColumn, }, + + performers: joinRepository{ + repository: repository{ + tableName: performersImagesTable, + idColumn: imageIDColumn, + }, + fkColumn: performerIDColumn, + }, + + galleries: joinRepository{ + repository: repository{ + tableName: galleriesImagesTable, + idColumn: imageIDColumn, + }, + fkColumn: galleryIDColumn, + }, + + files: filesRepository{ + repository: repository{ + tableName: imagesFilesTable, + 
idColumn: imageIDColumn, + }, + }, + + tags: joinRepository{ + repository: repository{ + tableName: imagesTagsTable, + idColumn: imageIDColumn, + }, + fkColumn: tagIDColumn, + foreignTable: tagTable, + orderBy: "tags.name ASC", + }, + } +) + +type ImageStore struct { + tableMgr *table + oCounterManager + + repo *storeRepository +} + +func NewImageStore(r *storeRepository) *ImageStore { + return &ImageStore{ tableMgr: imageTableMgr, oCounterManager: oCounterManager{imageTableMgr}, - fileStore: fileStore, + repo: r, } } @@ -417,14 +480,50 @@ func (qb *ImageStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]*mo return ret, nil } +// Returns the custom cover for the gallery, if one has been set. +func (qb *ImageStore) CoverByGalleryID(ctx context.Context, galleryID int) (*models.Image, error) { + table := qb.table() + + sq := dialect.From(table). + InnerJoin( + galleriesImagesJoinTable, + goqu.On(table.Col(idColumn).Eq(galleriesImagesJoinTable.Col(imageIDColumn))), + ). + Select(table.Col(idColumn)). + Where(goqu.And( + galleriesImagesJoinTable.Col("gallery_id").Eq(galleryID), + galleriesImagesJoinTable.Col("cover").Eq(true), + )) + + q := qb.selectDataset().Prepared(true).Where( + table.Col(idColumn).Eq( + sq, + ), + ) + + ret, err := qb.getMany(ctx, q) + if err != nil { + return nil, fmt.Errorf("getting cover for gallery %d: %w", galleryID, err) + } + + switch { + case len(ret) > 1: + return nil, fmt.Errorf("internal error: multiple covers returned for gallery %d", galleryID) + case len(ret) == 1: + return ret[0], nil + default: + return nil, nil + } +} + func (qb *ImageStore) GetFiles(ctx context.Context, id int) ([]models.File, error) { - fileIDs, err := qb.filesRepository().get(ctx, id) + fileIDs, err := imageRepository.files.get(ctx, id) if err != nil { return nil, err } // use fileStore to load files - files, err := qb.fileStore.Find(ctx, fileIDs...) + files, err := qb.repo.File.Find(ctx, fileIDs...) 
if err != nil { return nil, err } @@ -434,7 +533,7 @@ func (qb *ImageStore) GetFiles(ctx context.Context, id int) ([]models.File, erro func (qb *ImageStore) GetManyFileIDs(ctx context.Context, ids []int) ([][]models.FileID, error) { const primaryOnly = false - return qb.filesRepository().getMany(ctx, ids, primaryOnly) + return imageRepository.files.getMany(ctx, ids, primaryOnly) } func (qb *ImageStore) FindByFileID(ctx context.Context, fileID models.FileID) ([]*models.Image, error) { @@ -505,8 +604,6 @@ func (qb *ImageStore) FindByChecksum(ctx context.Context, checksum string) ([]*m func (qb *ImageStore) FindByGalleryID(ctx context.Context, galleryID int) ([]*models.Image, error) { table := qb.table() - fileTable := fileTableMgr.table - folderTable := folderTableMgr.table sq := dialect.From(table). InnerJoin( @@ -521,7 +618,7 @@ func (qb *ImageStore) FindByGalleryID(ctx context.Context, galleryID int) ([]*mo table.Col(idColumn).Eq( sq, ), - ).Order(folderTable.Col("path").Asc(), fileTable.Col("basename").Asc()) + ).Order(goqu.L("COALESCE(folders.path, '') || COALESCE(files.basename, '') COLLATE NATURAL_CI").Asc()) ret, err := qb.getMany(ctx, q) if err != nil { @@ -531,6 +628,33 @@ func (qb *ImageStore) FindByGalleryID(ctx context.Context, galleryID int) ([]*mo return ret, nil } +func (qb *ImageStore) FindByGalleryIDIndex(ctx context.Context, galleryID int, index uint) (*models.Image, error) { + table := qb.table() + fileTable := fileTableMgr.table + folderTable := folderTableMgr.table + + q := qb.selectDataset(). + InnerJoin( + galleriesImagesJoinTable, + goqu.On(table.Col(idColumn).Eq(galleriesImagesJoinTable.Col(imageIDColumn))), + ). + Where(galleriesImagesJoinTable.Col(galleryIDColumn).Eq(galleryID)). + Prepared(true). + Order(folderTable.Col("path").Asc(), fileTable.Col("basename").Asc()). 
+ Limit(1).Offset(index) + + ret, err := qb.getMany(ctx, q) + if err != nil { + return nil, fmt.Errorf("getting images for gallery %d: %w", galleryID, err) + } + + if len(ret) == 0 { + return nil, nil + } + + return ret[0], nil +} + func (qb *ImageStore) CountByGalleryID(ctx context.Context, galleryID int) (int, error) { joinTable := goqu.T(galleriesImagesTable) @@ -642,110 +766,6 @@ func (qb *ImageStore) All(ctx context.Context) ([]*models.Image, error) { return qb.getMany(ctx, qb.selectDataset()) } -func (qb *ImageStore) validateFilter(imageFilter *models.ImageFilterType) error { - const and = "AND" - const or = "OR" - const not = "NOT" - - if imageFilter.And != nil { - if imageFilter.Or != nil { - return illegalFilterCombination(and, or) - } - if imageFilter.Not != nil { - return illegalFilterCombination(and, not) - } - - return qb.validateFilter(imageFilter.And) - } - - if imageFilter.Or != nil { - if imageFilter.Not != nil { - return illegalFilterCombination(or, not) - } - - return qb.validateFilter(imageFilter.Or) - } - - if imageFilter.Not != nil { - return qb.validateFilter(imageFilter.Not) - } - - return nil -} - -func (qb *ImageStore) makeFilter(ctx context.Context, imageFilter *models.ImageFilterType) *filterBuilder { - query := &filterBuilder{} - - if imageFilter.And != nil { - query.and(qb.makeFilter(ctx, imageFilter.And)) - } - if imageFilter.Or != nil { - query.or(qb.makeFilter(ctx, imageFilter.Or)) - } - if imageFilter.Not != nil { - query.not(qb.makeFilter(ctx, imageFilter.Not)) - } - - query.handleCriterion(ctx, intCriterionHandler(imageFilter.ID, "images.id", nil)) - query.handleCriterion(ctx, criterionHandlerFunc(func(ctx context.Context, f *filterBuilder) { - if imageFilter.Checksum != nil { - qb.addImagesFilesTable(f) - f.addInnerJoin(fingerprintTable, "fingerprints_md5", "images_files.file_id = fingerprints_md5.file_id AND fingerprints_md5.type = 'md5'") - } - - stringCriterionHandler(imageFilter.Checksum, "fingerprints_md5.fingerprint")(ctx, 
f) - })) - query.handleCriterion(ctx, stringCriterionHandler(imageFilter.Title, "images.title")) - query.handleCriterion(ctx, stringCriterionHandler(imageFilter.Code, "images.code")) - query.handleCriterion(ctx, stringCriterionHandler(imageFilter.Details, "images.details")) - query.handleCriterion(ctx, stringCriterionHandler(imageFilter.Photographer, "images.photographer")) - - query.handleCriterion(ctx, pathCriterionHandler(imageFilter.Path, "folders.path", "files.basename", qb.addFoldersTable)) - query.handleCriterion(ctx, imageFileCountCriterionHandler(qb, imageFilter.FileCount)) - query.handleCriterion(ctx, intCriterionHandler(imageFilter.Rating100, "images.rating", nil)) - query.handleCriterion(ctx, intCriterionHandler(imageFilter.OCounter, "images.o_counter", nil)) - query.handleCriterion(ctx, boolCriterionHandler(imageFilter.Organized, "images.organized", nil)) - query.handleCriterion(ctx, dateCriterionHandler(imageFilter.Date, "images.date")) - query.handleCriterion(ctx, imageURLsCriterionHandler(imageFilter.URL)) - - query.handleCriterion(ctx, resolutionCriterionHandler(imageFilter.Resolution, "image_files.height", "image_files.width", qb.addImageFilesTable)) - query.handleCriterion(ctx, orientationCriterionHandler(imageFilter.Orientation, "image_files.height", "image_files.width", qb.addImageFilesTable)) - query.handleCriterion(ctx, imageIsMissingCriterionHandler(qb, imageFilter.IsMissing)) - - query.handleCriterion(ctx, imageTagsCriterionHandler(qb, imageFilter.Tags)) - query.handleCriterion(ctx, imageTagCountCriterionHandler(qb, imageFilter.TagCount)) - query.handleCriterion(ctx, imageGalleriesCriterionHandler(qb, imageFilter.Galleries)) - query.handleCriterion(ctx, imagePerformersCriterionHandler(qb, imageFilter.Performers)) - query.handleCriterion(ctx, imagePerformerCountCriterionHandler(qb, imageFilter.PerformerCount)) - query.handleCriterion(ctx, studioCriterionHandler(imageTable, imageFilter.Studios)) - query.handleCriterion(ctx, 
imagePerformerTagsCriterionHandler(qb, imageFilter.PerformerTags)) - query.handleCriterion(ctx, imagePerformerFavoriteCriterionHandler(imageFilter.PerformerFavorite)) - query.handleCriterion(ctx, imagePerformerAgeCriterionHandler(imageFilter.PerformerAge)) - query.handleCriterion(ctx, timestampCriterionHandler(imageFilter.CreatedAt, "images.created_at")) - query.handleCriterion(ctx, timestampCriterionHandler(imageFilter.UpdatedAt, "images.updated_at")) - - return query -} - -func (qb *ImageStore) addImagesFilesTable(f *filterBuilder) { - f.addLeftJoin(imagesFilesTable, "", "images_files.image_id = images.id") -} - -func (qb *ImageStore) addFilesTable(f *filterBuilder) { - qb.addImagesFilesTable(f) - f.addLeftJoin(fileTable, "", "images_files.file_id = files.id") -} - -func (qb *ImageStore) addFoldersTable(f *filterBuilder) { - qb.addFilesTable(f) - f.addLeftJoin(folderTable, "", "files.parent_folder_id = folders.id") -} - -func (qb *ImageStore) addImageFilesTable(f *filterBuilder) { - qb.addImagesFilesTable(f) - f.addLeftJoin(imageFileTable, "", "image_files.file_id = images_files.file_id") -} - func (qb *ImageStore) makeQuery(ctx context.Context, imageFilter *models.ImageFilterType, findFilter *models.FindFilterType) (*queryBuilder, error) { if imageFilter == nil { imageFilter = &models.ImageFilterType{} @@ -754,7 +774,7 @@ func (qb *ImageStore) makeQuery(ctx context.Context, imageFilter *models.ImageFi findFilter = &models.FindFilterType{} } - query := qb.newQuery() + query := imageRepository.newQuery() distinctIDs(&query, imageTable) if q := findFilter.Q; q != nil && *q != "" { @@ -782,10 +802,9 @@ func (qb *ImageStore) makeQuery(ctx context.Context, imageFilter *models.ImageFi query.parseQueryString(searchColumns, *q) } - if err := qb.validateFilter(imageFilter); err != nil { - return nil, err - } - filter := qb.makeFilter(ctx, imageFilter) + filter := filterBuilderFromHandler(ctx, &imageFilterHandler{ + imageFilter: imageFilter, + }) if err := 
query.addFilter(filter); err != nil { return nil, err @@ -824,7 +843,7 @@ func (qb *ImageStore) queryGroupedFields(ctx context.Context, options models.Ima return models.NewImageQueryResult(qb), nil } - aggregateQuery := qb.newQuery() + aggregateQuery := imageRepository.newQuery() if options.Count { aggregateQuery.addColumn("COUNT(DISTINCT temp.id) as total") @@ -868,7 +887,7 @@ func (qb *ImageStore) queryGroupedFields(ctx context.Context, options models.Ima Megapixels null.Float Size null.Float }{} - if err := qb.repository.queryStruct(ctx, aggregateQuery.toSQL(includeSortPagination), query.args, &out); err != nil { + if err := imageRepository.queryStruct(ctx, aggregateQuery.toSQL(includeSortPagination), query.args, &out); err != nil { return nil, err } @@ -888,171 +907,6 @@ func (qb *ImageStore) QueryCount(ctx context.Context, imageFilter *models.ImageF return query.executeCount(ctx) } -func imageFileCountCriterionHandler(qb *ImageStore, fileCount *models.IntCriterionInput) criterionHandlerFunc { - h := countCriterionHandlerBuilder{ - primaryTable: imageTable, - joinTable: imagesFilesTable, - primaryFK: imageIDColumn, - } - - return h.handler(fileCount) -} - -func imageIsMissingCriterionHandler(qb *ImageStore, isMissing *string) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if isMissing != nil && *isMissing != "" { - switch *isMissing { - case "studio": - f.addWhere("images.studio_id IS NULL") - case "performers": - qb.performersRepository().join(f, "performers_join", "images.id") - f.addWhere("performers_join.image_id IS NULL") - case "galleries": - qb.galleriesRepository().join(f, "galleries_join", "images.id") - f.addWhere("galleries_join.image_id IS NULL") - case "tags": - qb.tagsRepository().join(f, "tags_join", "images.id") - f.addWhere("tags_join.image_id IS NULL") - default: - f.addWhere("(images." + *isMissing + " IS NULL OR TRIM(images." 
+ *isMissing + ") = '')") - } - } - } -} - -func imageURLsCriterionHandler(url *models.StringCriterionInput) criterionHandlerFunc { - h := stringListCriterionHandlerBuilder{ - joinTable: imagesURLsTable, - stringColumn: imageURLColumn, - addJoinTable: func(f *filterBuilder) { - imagesURLsTableMgr.join(f, "", "images.id") - }, - } - - return h.handler(url) -} - -func (qb *ImageStore) getMultiCriterionHandlerBuilder(foreignTable, joinTable, foreignFK string, addJoinsFunc func(f *filterBuilder)) multiCriterionHandlerBuilder { - return multiCriterionHandlerBuilder{ - primaryTable: imageTable, - foreignTable: foreignTable, - joinTable: joinTable, - primaryFK: imageIDColumn, - foreignFK: foreignFK, - addJoinsFunc: addJoinsFunc, - } -} - -func imageTagsCriterionHandler(qb *ImageStore, tags *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { - h := joinedHierarchicalMultiCriterionHandlerBuilder{ - tx: qb.tx, - - primaryTable: imageTable, - foreignTable: tagTable, - foreignFK: "tag_id", - - relationsTable: "tags_relations", - joinAs: "image_tag", - joinTable: imagesTagsTable, - primaryFK: imageIDColumn, - } - - return h.handler(tags) -} - -func imageTagCountCriterionHandler(qb *ImageStore, tagCount *models.IntCriterionInput) criterionHandlerFunc { - h := countCriterionHandlerBuilder{ - primaryTable: imageTable, - joinTable: imagesTagsTable, - primaryFK: imageIDColumn, - } - - return h.handler(tagCount) -} - -func imageGalleriesCriterionHandler(qb *ImageStore, galleries *models.MultiCriterionInput) criterionHandlerFunc { - addJoinsFunc := func(f *filterBuilder) { - if galleries.Modifier == models.CriterionModifierIncludes || galleries.Modifier == models.CriterionModifierIncludesAll { - f.addInnerJoin(galleriesImagesTable, "", "galleries_images.image_id = images.id") - f.addInnerJoin(galleryTable, "", "galleries_images.gallery_id = galleries.id") - } - } - h := qb.getMultiCriterionHandlerBuilder(galleryTable, galleriesImagesTable, galleryIDColumn, addJoinsFunc) - 
- return h.handler(galleries) -} - -func imagePerformersCriterionHandler(qb *ImageStore, performers *models.MultiCriterionInput) criterionHandlerFunc { - h := joinedMultiCriterionHandlerBuilder{ - primaryTable: imageTable, - joinTable: performersImagesTable, - joinAs: "performers_join", - primaryFK: imageIDColumn, - foreignFK: performerIDColumn, - - addJoinTable: func(f *filterBuilder) { - qb.performersRepository().join(f, "performers_join", "images.id") - }, - } - - return h.handler(performers) -} - -func imagePerformerCountCriterionHandler(qb *ImageStore, performerCount *models.IntCriterionInput) criterionHandlerFunc { - h := countCriterionHandlerBuilder{ - primaryTable: imageTable, - joinTable: performersImagesTable, - primaryFK: imageIDColumn, - } - - return h.handler(performerCount) -} - -func imagePerformerFavoriteCriterionHandler(performerfavorite *bool) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if performerfavorite != nil { - f.addLeftJoin("performers_images", "", "images.id = performers_images.image_id") - - if *performerfavorite { - // contains at least one favorite - f.addLeftJoin("performers", "", "performers.id = performers_images.performer_id") - f.addWhere("performers.favorite = 1") - } else { - // contains zero favorites - f.addLeftJoin(`(SELECT performers_images.image_id as id FROM performers_images -JOIN performers ON performers.id = performers_images.performer_id -GROUP BY performers_images.image_id HAVING SUM(performers.favorite) = 0)`, "nofaves", "images.id = nofaves.id") - f.addWhere("performers_images.image_id IS NULL OR nofaves.id IS NOT NULL") - } - } - } -} - -func imagePerformerAgeCriterionHandler(performerAge *models.IntCriterionInput) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if performerAge != nil { - f.addInnerJoin("performers_images", "", "images.id = performers_images.image_id") - f.addInnerJoin("performers", "", "performers_images.performer_id = 
performers.id") - - f.addWhere("images.date != '' AND performers.birthdate != ''") - f.addWhere("images.date IS NOT NULL AND performers.birthdate IS NOT NULL") - - ageCalc := "cast(strftime('%Y.%m%d', images.date) - strftime('%Y.%m%d', performers.birthdate) as int)" - whereClause, args := getIntWhereClause(ageCalc, performerAge.Modifier, performerAge.Value, performerAge.Value2) - f.addWhere(whereClause, args...) - } - } -} - -func imagePerformerTagsCriterionHandler(qb *ImageStore, tags *models.HierarchicalMultiCriterionInput) criterionHandler { - return &joinedPerformerTagsHandler{ - criterion: tags, - primaryTable: imageTable, - joinTable: performersImagesTable, - joinPrimaryKey: imageIDColumn, - } -} - var imageSortOptions = sortOptions{ "created_at", "date", @@ -1138,34 +992,13 @@ func (qb *ImageStore) setImageSortAndPagination(q *queryBuilder, findFilter *mod return nil } -func (qb *ImageStore) galleriesRepository() *joinRepository { - return &joinRepository{ - repository: repository{ - tx: qb.tx, - tableName: galleriesImagesTable, - idColumn: imageIDColumn, - }, - fkColumn: galleryIDColumn, - } -} - -func (qb *ImageStore) filesRepository() *filesRepository { - return &filesRepository{ - repository: repository{ - tx: qb.tx, - tableName: imagesFilesTable, - idColumn: imageIDColumn, - }, - } -} - func (qb *ImageStore) AddFileID(ctx context.Context, id int, fileID models.FileID) error { const firstPrimary = false return imagesFilesTableMgr.insertJoins(ctx, id, firstPrimary, []models.FileID{fileID}) } func (qb *ImageStore) GetGalleryIDs(ctx context.Context, imageID int) ([]int, error) { - return qb.galleriesRepository().getIDs(ctx, imageID) + return imageRepository.galleries.getIDs(ctx, imageID) } // func (qb *imageQueryBuilder) UpdateGalleries(ctx context.Context, imageID int, galleryIDs []int) error { @@ -1173,46 +1006,22 @@ func (qb *ImageStore) GetGalleryIDs(ctx context.Context, imageID int) ([]int, er // return qb.galleriesRepository().replace(ctx, imageID, 
galleryIDs) // } -func (qb *ImageStore) performersRepository() *joinRepository { - return &joinRepository{ - repository: repository{ - tx: qb.tx, - tableName: performersImagesTable, - idColumn: imageIDColumn, - }, - fkColumn: performerIDColumn, - } -} - func (qb *ImageStore) GetPerformerIDs(ctx context.Context, imageID int) ([]int, error) { - return qb.performersRepository().getIDs(ctx, imageID) + return imageRepository.performers.getIDs(ctx, imageID) } func (qb *ImageStore) UpdatePerformers(ctx context.Context, imageID int, performerIDs []int) error { // Delete the existing joins and then create new ones - return qb.performersRepository().replace(ctx, imageID, performerIDs) -} - -func (qb *ImageStore) tagsRepository() *joinRepository { - return &joinRepository{ - repository: repository{ - tx: qb.tx, - tableName: imagesTagsTable, - idColumn: imageIDColumn, - }, - fkColumn: tagIDColumn, - foreignTable: tagTable, - orderBy: "tags.name ASC", - } + return imageRepository.performers.replace(ctx, imageID, performerIDs) } func (qb *ImageStore) GetTagIDs(ctx context.Context, imageID int) ([]int, error) { - return qb.tagsRepository().getIDs(ctx, imageID) + return imageRepository.tags.getIDs(ctx, imageID) } func (qb *ImageStore) UpdateTags(ctx context.Context, imageID int, tagIDs []int) error { // Delete the existing joins and then create new ones - return qb.tagsRepository().replace(ctx, imageID, tagIDs) + return imageRepository.tags.replace(ctx, imageID, tagIDs) } func (qb *ImageStore) GetURLs(ctx context.Context, imageID int) ([]string, error) { diff --git a/pkg/sqlite/image_filter.go b/pkg/sqlite/image_filter.go new file mode 100644 index 00000000000..8f2d5d6b90a --- /dev/null +++ b/pkg/sqlite/image_filter.go @@ -0,0 +1,292 @@ +package sqlite + +import ( + "context" + + "github.com/stashapp/stash/pkg/models" +) + +type imageFilterHandler struct { + imageFilter *models.ImageFilterType +} + +func (qb *imageFilterHandler) validate() error { + imageFilter := qb.imageFilter + 
if imageFilter == nil { + return nil + } + + if err := validateFilterCombination(imageFilter.OperatorFilter); err != nil { + return err + } + + if subFilter := imageFilter.SubFilter(); subFilter != nil { + sqb := &imageFilterHandler{imageFilter: subFilter} + if err := sqb.validate(); err != nil { + return err + } + } + + return nil +} + +func (qb *imageFilterHandler) handle(ctx context.Context, f *filterBuilder) { + imageFilter := qb.imageFilter + if imageFilter == nil { + return + } + + if err := qb.validate(); err != nil { + f.setError(err) + return + } + + sf := imageFilter.SubFilter() + if sf != nil { + sub := &imageFilterHandler{sf} + handleSubFilter(ctx, sub, f, imageFilter.OperatorFilter) + } + + f.handleCriterion(ctx, qb.criterionHandler()) +} + +func (qb *imageFilterHandler) criterionHandler() criterionHandler { + imageFilter := qb.imageFilter + return compoundHandler{ + intCriterionHandler(imageFilter.ID, "images.id", nil), + criterionHandlerFunc(func(ctx context.Context, f *filterBuilder) { + if imageFilter.Checksum != nil { + imageRepository.addImagesFilesTable(f) + f.addInnerJoin(fingerprintTable, "fingerprints_md5", "images_files.file_id = fingerprints_md5.file_id AND fingerprints_md5.type = 'md5'") + } + + stringCriterionHandler(imageFilter.Checksum, "fingerprints_md5.fingerprint")(ctx, f) + }), + stringCriterionHandler(imageFilter.Title, "images.title"), + stringCriterionHandler(imageFilter.Code, "images.code"), + stringCriterionHandler(imageFilter.Details, "images.details"), + stringCriterionHandler(imageFilter.Photographer, "images.photographer"), + + pathCriterionHandler(imageFilter.Path, "folders.path", "files.basename", imageRepository.addFoldersTable), + qb.fileCountCriterionHandler(imageFilter.FileCount), + intCriterionHandler(imageFilter.Rating100, "images.rating", nil), + intCriterionHandler(imageFilter.OCounter, "images.o_counter", nil), + boolCriterionHandler(imageFilter.Organized, "images.organized", nil), + 
&dateCriterionHandler{imageFilter.Date, "images.date", nil}, + qb.urlsCriterionHandler(imageFilter.URL), + + resolutionCriterionHandler(imageFilter.Resolution, "image_files.height", "image_files.width", imageRepository.addImageFilesTable), + orientationCriterionHandler(imageFilter.Orientation, "image_files.height", "image_files.width", imageRepository.addImageFilesTable), + qb.missingCriterionHandler(imageFilter.IsMissing), + + qb.tagsCriterionHandler(imageFilter.Tags), + qb.tagCountCriterionHandler(imageFilter.TagCount), + qb.galleriesCriterionHandler(imageFilter.Galleries), + qb.performersCriterionHandler(imageFilter.Performers), + qb.performerCountCriterionHandler(imageFilter.PerformerCount), + studioCriterionHandler(imageTable, imageFilter.Studios), + qb.performerTagsCriterionHandler(imageFilter.PerformerTags), + qb.performerFavoriteCriterionHandler(imageFilter.PerformerFavorite), + qb.performerAgeCriterionHandler(imageFilter.PerformerAge), + ×tampCriterionHandler{imageFilter.CreatedAt, "images.created_at", nil}, + ×tampCriterionHandler{imageFilter.UpdatedAt, "images.updated_at", nil}, + + &relatedFilterHandler{ + relatedIDCol: "galleries_images.gallery_id", + relatedRepo: galleryRepository.repository, + relatedHandler: &galleryFilterHandler{imageFilter.GalleriesFilter}, + joinFn: func(f *filterBuilder) { + imageRepository.galleries.innerJoin(f, "", "images.id") + }, + }, + + &relatedFilterHandler{ + relatedIDCol: "performers_join.performer_id", + relatedRepo: performerRepository.repository, + relatedHandler: &performerFilterHandler{imageFilter.PerformersFilter}, + joinFn: func(f *filterBuilder) { + imageRepository.performers.innerJoin(f, "performers_join", "images.id") + }, + }, + + &relatedFilterHandler{ + relatedIDCol: "images.studio_id", + relatedRepo: studioRepository.repository, + relatedHandler: &studioFilterHandler{imageFilter.StudiosFilter}, + }, + + &relatedFilterHandler{ + relatedIDCol: "image_tag.tag_id", + relatedRepo: tagRepository.repository, + 
relatedHandler: &tagFilterHandler{imageFilter.TagsFilter}, + joinFn: func(f *filterBuilder) { + imageRepository.tags.innerJoin(f, "image_tag", "images.id") + }, + }, + } +} + +func (qb *imageFilterHandler) fileCountCriterionHandler(fileCount *models.IntCriterionInput) criterionHandlerFunc { + h := countCriterionHandlerBuilder{ + primaryTable: imageTable, + joinTable: imagesFilesTable, + primaryFK: imageIDColumn, + } + + return h.handler(fileCount) +} + +func (qb *imageFilterHandler) missingCriterionHandler(isMissing *string) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if isMissing != nil && *isMissing != "" { + switch *isMissing { + case "studio": + f.addWhere("images.studio_id IS NULL") + case "performers": + imageRepository.performers.join(f, "performers_join", "images.id") + f.addWhere("performers_join.image_id IS NULL") + case "galleries": + imageRepository.galleries.join(f, "galleries_join", "images.id") + f.addWhere("galleries_join.image_id IS NULL") + case "tags": + imageRepository.tags.join(f, "tags_join", "images.id") + f.addWhere("tags_join.image_id IS NULL") + default: + f.addWhere("(images." + *isMissing + " IS NULL OR TRIM(images." 
+ *isMissing + ") = '')") + } + } + } +} + +func (qb *imageFilterHandler) urlsCriterionHandler(url *models.StringCriterionInput) criterionHandlerFunc { + h := stringListCriterionHandlerBuilder{ + primaryTable: imageTable, + primaryFK: imageIDColumn, + joinTable: imagesURLsTable, + stringColumn: imageURLColumn, + addJoinTable: func(f *filterBuilder) { + imagesURLsTableMgr.join(f, "", "images.id") + }, + } + + return h.handler(url) +} + +func (qb *imageFilterHandler) getMultiCriterionHandlerBuilder(foreignTable, joinTable, foreignFK string, addJoinsFunc func(f *filterBuilder)) multiCriterionHandlerBuilder { + return multiCriterionHandlerBuilder{ + primaryTable: imageTable, + foreignTable: foreignTable, + joinTable: joinTable, + primaryFK: imageIDColumn, + foreignFK: foreignFK, + addJoinsFunc: addJoinsFunc, + } +} + +func (qb *imageFilterHandler) tagsCriterionHandler(tags *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { + h := joinedHierarchicalMultiCriterionHandlerBuilder{ + primaryTable: imageTable, + foreignTable: tagTable, + foreignFK: "tag_id", + + relationsTable: "tags_relations", + joinAs: "image_tag", + joinTable: imagesTagsTable, + primaryFK: imageIDColumn, + } + + return h.handler(tags) +} + +func (qb *imageFilterHandler) tagCountCriterionHandler(tagCount *models.IntCriterionInput) criterionHandlerFunc { + h := countCriterionHandlerBuilder{ + primaryTable: imageTable, + joinTable: imagesTagsTable, + primaryFK: imageIDColumn, + } + + return h.handler(tagCount) +} + +func (qb *imageFilterHandler) galleriesCriterionHandler(galleries *models.MultiCriterionInput) criterionHandlerFunc { + addJoinsFunc := func(f *filterBuilder) { + if galleries.Modifier == models.CriterionModifierIncludes || galleries.Modifier == models.CriterionModifierIncludesAll { + f.addInnerJoin(galleriesImagesTable, "", "galleries_images.image_id = images.id") + f.addInnerJoin(galleryTable, "", "galleries_images.gallery_id = galleries.id") + } + } + h := 
qb.getMultiCriterionHandlerBuilder(galleryTable, galleriesImagesTable, galleryIDColumn, addJoinsFunc) + + return h.handler(galleries) +} + +func (qb *imageFilterHandler) performersCriterionHandler(performers *models.MultiCriterionInput) criterionHandlerFunc { + h := joinedMultiCriterionHandlerBuilder{ + primaryTable: imageTable, + joinTable: performersImagesTable, + joinAs: "performers_join", + primaryFK: imageIDColumn, + foreignFK: performerIDColumn, + + addJoinTable: func(f *filterBuilder) { + imageRepository.performers.join(f, "performers_join", "images.id") + }, + } + + return h.handler(performers) +} + +func (qb *imageFilterHandler) performerCountCriterionHandler(performerCount *models.IntCriterionInput) criterionHandlerFunc { + h := countCriterionHandlerBuilder{ + primaryTable: imageTable, + joinTable: performersImagesTable, + primaryFK: imageIDColumn, + } + + return h.handler(performerCount) +} + +func (qb *imageFilterHandler) performerFavoriteCriterionHandler(performerfavorite *bool) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if performerfavorite != nil { + f.addLeftJoin("performers_images", "", "images.id = performers_images.image_id") + + if *performerfavorite { + // contains at least one favorite + f.addLeftJoin("performers", "", "performers.id = performers_images.performer_id") + f.addWhere("performers.favorite = 1") + } else { + // contains zero favorites + f.addLeftJoin(`(SELECT performers_images.image_id as id FROM performers_images +JOIN performers ON performers.id = performers_images.performer_id +GROUP BY performers_images.image_id HAVING SUM(performers.favorite) = 0)`, "nofaves", "images.id = nofaves.id") + f.addWhere("performers_images.image_id IS NULL OR nofaves.id IS NOT NULL") + } + } + } +} + +func (qb *imageFilterHandler) performerAgeCriterionHandler(performerAge *models.IntCriterionInput) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if performerAge != nil { + 
f.addInnerJoin("performers_images", "", "images.id = performers_images.image_id") + f.addInnerJoin("performers", "", "performers_images.performer_id = performers.id") + + f.addWhere("images.date != '' AND performers.birthdate != ''") + f.addWhere("images.date IS NOT NULL AND performers.birthdate IS NOT NULL") + + ageCalc := "cast(strftime('%Y.%m%d', images.date) - strftime('%Y.%m%d', performers.birthdate) as int)" + whereClause, args := getIntWhereClause(ageCalc, performerAge.Modifier, performerAge.Value, performerAge.Value2) + f.addWhere(whereClause, args...) + } + } +} + +func (qb *imageFilterHandler) performerTagsCriterionHandler(tags *models.HierarchicalMultiCriterionInput) criterionHandler { + return &joinedPerformerTagsHandler{ + criterion: tags, + primaryTable: imageTable, + joinTable: performersImagesTable, + joinPrimaryKey: imageIDColumn, + } +} diff --git a/pkg/sqlite/image_test.go b/pkg/sqlite/image_test.go index 7a5b9ce1e1a..aa4ed3b99ad 100644 --- a/pkg/sqlite/image_test.go +++ b/pkg/sqlite/image_test.go @@ -1668,10 +1668,12 @@ func TestImageQueryPathOr(t *testing.T) { Value: image1Path, Modifier: models.CriterionModifierEquals, }, - Or: &models.ImageFilterType{ - Path: &models.StringCriterionInput{ - Value: image2Path, - Modifier: models.CriterionModifierEquals, + OperatorFilter: models.OperatorFilter[models.ImageFilterType]{ + Or: &models.ImageFilterType{ + Path: &models.StringCriterionInput{ + Value: image2Path, + Modifier: models.CriterionModifierEquals, + }, }, }, } @@ -1702,10 +1704,12 @@ func TestImageQueryPathAndRating(t *testing.T) { Value: imagePath, Modifier: models.CriterionModifierEquals, }, - And: &models.ImageFilterType{ - Rating100: &models.IntCriterionInput{ - Value: int(imageRating.Int64), - Modifier: models.CriterionModifierEquals, + OperatorFilter: models.OperatorFilter[models.ImageFilterType]{ + And: &models.ImageFilterType{ + Rating100: &models.IntCriterionInput{ + Value: int(imageRating.Int64), + Modifier: 
models.CriterionModifierEquals, + }, }, }, } @@ -1743,8 +1747,10 @@ func TestImageQueryPathNotRating(t *testing.T) { imageFilter := models.ImageFilterType{ Path: &pathCriterion, - Not: &models.ImageFilterType{ - Rating100: &ratingCriterion, + OperatorFilter: models.OperatorFilter[models.ImageFilterType]{ + Not: &models.ImageFilterType{ + Rating100: &ratingCriterion, + }, }, } @@ -1775,8 +1781,10 @@ func TestImageIllegalQuery(t *testing.T) { } imageFilter := &models.ImageFilterType{ - And: &subFilter, - Or: &subFilter, + OperatorFilter: models.OperatorFilter[models.ImageFilterType]{ + And: &subFilter, + Or: &subFilter, + }, } withTxn(func(ctx context.Context) error { @@ -2045,7 +2053,7 @@ func TestImageQueryIsMissingPerformers(t *testing.T) { assert.True(t, len(images) > 0) - // ensure non of the ids equal the one with movies + // ensure non of the ids equal the one with performers for _, image := range images { assert.NotEqual(t, imageIDs[imageIdxWithPerformer], image.ID) } diff --git a/pkg/sqlite/migrations/59_movie_urls.up.sql b/pkg/sqlite/migrations/59_movie_urls.up.sql new file mode 100644 index 00000000000..3ea860e3020 --- /dev/null +++ b/pkg/sqlite/migrations/59_movie_urls.up.sql @@ -0,0 +1,83 @@ +PRAGMA foreign_keys=OFF; + +CREATE TABLE `movie_urls` ( + `movie_id` integer NOT NULL, + `position` integer NOT NULL, + `url` varchar(255) NOT NULL, + foreign key(`movie_id`) references `movies`(`id`) on delete CASCADE, + PRIMARY KEY(`movie_id`, `position`, `url`) +); + +CREATE INDEX `movie_urls_url` on `movie_urls` (`url`); + +-- drop url +CREATE TABLE `movies_new` ( + `id` integer not null primary key autoincrement, + `name` varchar(255) not null, + `aliases` varchar(255), + `duration` integer, + `date` date, + `rating` tinyint, + `studio_id` integer REFERENCES `studios`(`id`) ON DELETE SET NULL, + `director` varchar(255), + `synopsis` text, + `created_at` datetime not null, + `updated_at` datetime not null, + `front_image_blob` varchar(255) REFERENCES 
`blobs`(`checksum`), + `back_image_blob` varchar(255) REFERENCES `blobs`(`checksum`) +); + +INSERT INTO `movies_new` + ( + `id`, + `name`, + `aliases`, + `duration`, + `date`, + `rating`, + `studio_id`, + `director`, + `synopsis`, + `created_at`, + `updated_at`, + `front_image_blob`, + `back_image_blob` + ) + SELECT + `id`, + `name`, + `aliases`, + `duration`, + `date`, + `rating`, + `studio_id`, + `director`, + `synopsis`, + `created_at`, + `updated_at`, + `front_image_blob`, + `back_image_blob` + FROM `movies`; + +INSERT INTO `movie_urls` + ( + `movie_id`, + `position`, + `url` + ) + SELECT + `id`, + '0', + `url` + FROM `movies` + WHERE `movies`.`url` IS NOT NULL AND `movies`.`url` != ''; + +DROP INDEX `index_movies_on_name_unique`; +DROP INDEX `index_movies_on_studio_id`; +DROP TABLE `movies`; +ALTER TABLE `movies_new` rename to `movies`; + +CREATE INDEX `index_movies_on_name` ON `movies`(`name`); +CREATE INDEX `index_movies_on_studio_id` on `movies` (`studio_id`); + +PRAGMA foreign_keys=ON; diff --git a/pkg/sqlite/migrations/60_default_filter_move.up.sql b/pkg/sqlite/migrations/60_default_filter_move.up.sql new file mode 100644 index 00000000000..2c6f6e1fcb8 --- /dev/null +++ b/pkg/sqlite/migrations/60_default_filter_move.up.sql @@ -0,0 +1,2 @@ +-- no schema changes +-- default filters will be removed in post-migration \ No newline at end of file diff --git a/pkg/sqlite/migrations/60_postmigrate.go b/pkg/sqlite/migrations/60_postmigrate.go new file mode 100644 index 00000000000..dfed33f1886 --- /dev/null +++ b/pkg/sqlite/migrations/60_postmigrate.go @@ -0,0 +1,176 @@ +package migrations + +import ( + "context" + "encoding/json" + "fmt" + "os" + "strings" + "time" + + "github.com/jmoiron/sqlx" + "github.com/stashapp/stash/internal/manager/config" + "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/sqlite" +) + +type schema60Migrator struct { + migrator +} + +func post60(ctx context.Context, db *sqlx.DB) error { + logger.Info("Running 
post-migration for schema version 60") + + m := schema60Migrator{ + migrator: migrator{ + db: db, + }, + } + + return m.migrate(ctx) +} + +func (m *schema60Migrator) decodeJSON(s string, v interface{}) { + if s == "" { + return + } + + if err := json.Unmarshal([]byte(s), v); err != nil { + logger.Errorf("error decoding json %q: %v", s, err) + } +} + +type schema60DefaultFilters map[string]interface{} + +func (m *schema60Migrator) migrate(ctx context.Context) error { + + // save default filters into the UI config + if err := m.withTxn(ctx, func(tx *sqlx.Tx) error { + query := "SELECT id, mode, find_filter, object_filter, ui_options FROM `saved_filters` WHERE `name` = ''" + + rows, err := m.db.Query(query) + if err != nil { + return err + } + defer rows.Close() + + defaultFilters := make(schema60DefaultFilters) + + for rows.Next() { + var ( + id int + mode string + findFilterStr string + objectFilterStr string + uiOptionsStr string + ) + + if err := rows.Scan(&id, &mode, &findFilterStr, &objectFilterStr, &uiOptionsStr); err != nil { + return err + } + + // convert the filters to the correct format + findFilter := make(map[string]interface{}) + objectFilter := make(map[string]interface{}) + uiOptions := make(map[string]interface{}) + + m.decodeJSON(findFilterStr, &findFilter) + m.decodeJSON(objectFilterStr, &objectFilter) + m.decodeJSON(uiOptionsStr, &uiOptions) + + o := map[string]interface{}{ + "mode": mode, + "find_filter": findFilter, + "object_filter": objectFilter, + "ui_options": uiOptions, + } + + defaultFilters[strings.ToLower(mode)] = o + } + + if err := rows.Err(); err != nil { + return err + } + + if err := m.saveDefaultFilters(defaultFilters); err != nil { + return fmt.Errorf("saving default filters: %w", err) + } + + // remove the default filters from the database + query = "DELETE FROM `saved_filters` WHERE `name` = ''" + if _, err := m.db.Exec(query); err != nil { + return fmt.Errorf("deleting default filters: %w", err) + } + + return nil + }); err != 
nil { + return err + } + + return nil +} + +func (m *schema60Migrator) saveDefaultFilters(defaultFilters schema60DefaultFilters) error { + if len(defaultFilters) == 0 { + logger.Debugf("no default filters to save") + return nil + } + + // save the default filters into the UI config + config := config.GetInstance() + + orgPath := config.GetConfigFile() + + if orgPath == "" { + // no config file to migrate (usually in a test or new system) + logger.Debugf("no config file to migrate") + return nil + } + + uiConfig := config.GetUIConfiguration() + if uiConfig == nil { + uiConfig = make(map[string]interface{}) + } + + // if the defaultFilters key already exists, don't overwrite them + if _, found := uiConfig["defaultFilters"]; found { + logger.Warn("defaultFilters already exists in the UI config, skipping migration") + return nil + } + + if err := m.backupConfig(orgPath); err != nil { + return fmt.Errorf("backing up config: %w", err) + } + + uiConfig["defaultFilters"] = map[string]interface{}(defaultFilters) + config.SetUIConfiguration(uiConfig) + + if err := config.Write(); err != nil { + return fmt.Errorf("failed to write config: %w", err) + } + + return nil +} + +func (m *schema60Migrator) backupConfig(orgPath string) error { + c := config.GetInstance() + + // save a backup of the original config file + backupPath := fmt.Sprintf("%s.59.%s", orgPath, time.Now().Format("20060102_150405")) + + data, err := c.Marshal() + if err != nil { + return fmt.Errorf("failed to marshal backup config: %w", err) + } + + logger.Infof("Backing up config to %s", backupPath) + if err := os.WriteFile(backupPath, data, 0644); err != nil { + return fmt.Errorf("failed to write backup config: %w", err) + } + + return nil +} + +func init() { + sqlite.RegisterPostMigration(60, post60) +} diff --git a/pkg/sqlite/migrations/61_movie_tags.up.sql b/pkg/sqlite/migrations/61_movie_tags.up.sql new file mode 100644 index 00000000000..cf898e2c590 --- /dev/null +++ 
b/pkg/sqlite/migrations/61_movie_tags.up.sql @@ -0,0 +1,10 @@ +CREATE TABLE `movies_tags` ( + `movie_id` integer NOT NULL, + `tag_id` integer NOT NULL, + foreign key(`movie_id`) references `movies`(`id`) on delete CASCADE, + foreign key(`tag_id`) references `tags`(`id`) on delete CASCADE, + PRIMARY KEY(`movie_id`, `tag_id`) +); + +CREATE INDEX `index_movies_tags_on_tag_id` on `movies_tags` (`tag_id`); +CREATE INDEX `index_movies_tags_on_movie_id` on `movies_tags` (`movie_id`); diff --git a/pkg/sqlite/migrations/62_performer_urls.up.sql b/pkg/sqlite/migrations/62_performer_urls.up.sql new file mode 100644 index 00000000000..cebfa86d616 --- /dev/null +++ b/pkg/sqlite/migrations/62_performer_urls.up.sql @@ -0,0 +1,155 @@ +PRAGMA foreign_keys=OFF; + +CREATE TABLE `performer_urls` ( + `performer_id` integer NOT NULL, + `position` integer NOT NULL, + `url` varchar(255) NOT NULL, + foreign key(`performer_id`) references `performers`(`id`) on delete CASCADE, + PRIMARY KEY(`performer_id`, `position`, `url`) +); + +CREATE INDEX `performers_urls_url` on `performer_urls` (`url`); + +-- drop url, twitter and instagram +-- make name not null +CREATE TABLE `performers_new` ( + `id` integer not null primary key autoincrement, + `name` varchar(255) not null, + `disambiguation` varchar(255), + `gender` varchar(20), + `birthdate` date, + `ethnicity` varchar(255), + `country` varchar(255), + `eye_color` varchar(255), + `height` int, + `measurements` varchar(255), + `fake_tits` varchar(255), + `career_length` varchar(255), + `tattoos` varchar(255), + `piercings` varchar(255), + `favorite` boolean not null default '0', + `created_at` datetime not null, + `updated_at` datetime not null, + `details` text, + `death_date` date, + `hair_color` varchar(255), + `weight` integer, + `rating` tinyint, + `ignore_auto_tag` boolean not null default '0', + `image_blob` varchar(255) REFERENCES `blobs`(`checksum`), + `penis_length` float, + `circumcised` varchar[10] +); + +INSERT INTO `performers_new` 
+ ( + `id`, + `name`, + `disambiguation`, + `gender`, + `birthdate`, + `ethnicity`, + `country`, + `eye_color`, + `height`, + `measurements`, + `fake_tits`, + `career_length`, + `tattoos`, + `piercings`, + `favorite`, + `created_at`, + `updated_at`, + `details`, + `death_date`, + `hair_color`, + `weight`, + `rating`, + `ignore_auto_tag`, + `image_blob`, + `penis_length`, + `circumcised` + ) + SELECT + `id`, + `name`, + `disambiguation`, + `gender`, + `birthdate`, + `ethnicity`, + `country`, + `eye_color`, + `height`, + `measurements`, + `fake_tits`, + `career_length`, + `tattoos`, + `piercings`, + `favorite`, + `created_at`, + `updated_at`, + `details`, + `death_date`, + `hair_color`, + `weight`, + `rating`, + `ignore_auto_tag`, + `image_blob`, + `penis_length`, + `circumcised` + FROM `performers`; + +INSERT INTO `performer_urls` + ( + `performer_id`, + `position`, + `url` + ) + SELECT + `id`, + '0', + `url` + FROM `performers` + WHERE `performers`.`url` IS NOT NULL AND `performers`.`url` != ''; + +INSERT INTO `performer_urls` + ( + `performer_id`, + `position`, + `url` + ) + SELECT + `id`, + (SELECT count(*) FROM `performer_urls` WHERE `performer_id` = `performers`.`id`)+1, + CASE + WHEN `twitter` LIKE 'http%://%' THEN `twitter` + ELSE 'https://www.twitter.com/' || `twitter` + END + FROM `performers` + WHERE `performers`.`twitter` IS NOT NULL AND `performers`.`twitter` != ''; + +INSERT INTO `performer_urls` + ( + `performer_id`, + `position`, + `url` + ) + SELECT + `id`, + (SELECT count(*) FROM `performer_urls` WHERE `performer_id` = `performers`.`id`)+1, + CASE + WHEN `instagram` LIKE 'http%://%' THEN `instagram` + ELSE 'https://www.instagram.com/' || `instagram` + END + FROM `performers` + WHERE `performers`.`instagram` IS NOT NULL AND `performers`.`instagram` != ''; + +DROP INDEX `performers_name_disambiguation_unique`; +DROP INDEX `performers_name_unique`; +DROP TABLE `performers`; +ALTER TABLE `performers_new` rename to `performers`; + +CREATE UNIQUE INDEX 
`performers_name_disambiguation_unique` on `performers` (`name`, `disambiguation`) WHERE `disambiguation` IS NOT NULL; +CREATE UNIQUE INDEX `performers_name_unique` on `performers` (`name`) WHERE `disambiguation` IS NULL; + +PRAGMA foreign_keys=ON; diff --git a/pkg/sqlite/migrations/63_studio_tags.up.sql b/pkg/sqlite/migrations/63_studio_tags.up.sql new file mode 100644 index 00000000000..ea652f18c1e --- /dev/null +++ b/pkg/sqlite/migrations/63_studio_tags.up.sql @@ -0,0 +1,9 @@ +CREATE TABLE `studios_tags` ( + `studio_id` integer NOT NULL, + `tag_id` integer NOT NULL, + foreign key(`studio_id`) references `studios`(`id`) on delete CASCADE, + foreign key(`tag_id`) references `tags`(`id`) on delete CASCADE, + PRIMARY KEY(`studio_id`, `tag_id`) +); + +CREATE INDEX `index_studios_tags_on_tag_id` on `studios_tags` (`tag_id`); \ No newline at end of file diff --git a/pkg/sqlite/migrations/64_fixes.up.sql b/pkg/sqlite/migrations/64_fixes.up.sql new file mode 100644 index 00000000000..6128c292d3c --- /dev/null +++ b/pkg/sqlite/migrations/64_fixes.up.sql @@ -0,0 +1,49 @@ +PRAGMA foreign_keys=OFF; + +-- recreate scenes_view_dates adding not null to scene_id and adding indexes +CREATE TABLE `scenes_view_dates_new` ( + `scene_id` integer not null, + `view_date` datetime not null, + foreign key(`scene_id`) references `scenes`(`id`) on delete CASCADE +); + +INSERT INTO `scenes_view_dates_new` + ( + `scene_id`, + `view_date` + ) + SELECT + `scene_id`, + `view_date` + FROM `scenes_view_dates` + WHERE `scenes_view_dates`.`scene_id` IS NOT NULL; + +DROP INDEX IF EXISTS `index_scenes_view_dates`; +DROP TABLE `scenes_view_dates`; +ALTER TABLE `scenes_view_dates_new` rename to `scenes_view_dates`; +CREATE INDEX `index_scenes_view_dates` ON `scenes_view_dates` (`scene_id`); + +-- recreate scenes_o_dates adding not null to scene_id and adding indexes +CREATE TABLE `scenes_o_dates_new` ( + `scene_id` integer not null, + `o_date` datetime not null, + foreign key(`scene_id`) references 
`scenes`(`id`) on delete CASCADE +); + +INSERT INTO `scenes_o_dates_new` + ( + `scene_id`, + `o_date` + ) + SELECT + `scene_id`, + `o_date` + FROM `scenes_o_dates` + WHERE `scenes_o_dates`.`scene_id` IS NOT NULL; + +DROP INDEX IF EXISTS `index_scenes_o_dates`; +DROP TABLE `scenes_o_dates`; +ALTER TABLE `scenes_o_dates_new` rename to `scenes_o_dates`; +CREATE INDEX `index_scenes_o_dates` ON `scenes_o_dates` (`scene_id`); + +PRAGMA foreign_keys=ON; \ No newline at end of file diff --git a/pkg/sqlite/migrations/64_postmigrate.go b/pkg/sqlite/migrations/64_postmigrate.go new file mode 100644 index 00000000000..5b0f31a256d --- /dev/null +++ b/pkg/sqlite/migrations/64_postmigrate.go @@ -0,0 +1,92 @@ +package migrations + +import ( + "context" + "fmt" + "time" + + "github.com/jmoiron/sqlx" + "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/sqlite" +) + +// this is a copy of the 55 post migration +// some non-UTC dates were missed, so we need to correct them + +type schema64Migrator struct { + migrator +} + +func post64(ctx context.Context, db *sqlx.DB) error { + logger.Info("Running post-migration for schema version 64") + + m := schema64Migrator{ + migrator: migrator{ + db: db, + }, + } + + return m.migrate(ctx) +} + +func (m *schema64Migrator) migrate(ctx context.Context) error { + // the last_played_at column was storing in a different format than the rest of the timestamps + // convert the play history date to the correct format + if err := m.withTxn(ctx, func(tx *sqlx.Tx) error { + query := "SELECT DISTINCT `scene_id`, `view_date` FROM `scenes_view_dates`" + + rows, err := m.db.Query(query) + if err != nil { + return err + } + defer rows.Close() + + for rows.Next() { + var ( + id int + viewDate sqlite.Timestamp + ) + + err := rows.Scan(&id, &viewDate) + if err != nil { + return err + } + + // skip if already in the correct format + if viewDate.Timestamp.Location() == time.UTC { + logger.Debugf("view date %s is already in the correct format", 
viewDate.Timestamp) + continue + } + + utcTimestamp := sqlite.UTCTimestamp{ + Timestamp: viewDate, + } + + // convert the timestamp to the correct format + logger.Debugf("correcting view date %q to UTC date %q for scene %d", viewDate.Timestamp, viewDate.Timestamp.UTC(), id) + r, err := m.db.Exec("UPDATE scenes_view_dates SET view_date = ? WHERE scene_id = ? AND (view_date = ? OR view_date = ?)", utcTimestamp, id, viewDate.Timestamp, viewDate) + if err != nil { + return fmt.Errorf("error correcting view date %s to %s: %w", viewDate.Timestamp, viewDate, err) + } + + rowsAffected, err := r.RowsAffected() + if err != nil { + return err + } + + if rowsAffected == 0 { + return fmt.Errorf("no rows affected when updating view date %s to %s for scene %d", viewDate.Timestamp, viewDate.Timestamp.UTC(), id) + } + } + + return rows.Err() + }); err != nil { + return err + } + + return nil +} + +func init() { + sqlite.RegisterPostMigration(64, post64) +} diff --git a/pkg/sqlite/migrations/65_movie_group_rename.up.sql b/pkg/sqlite/migrations/65_movie_group_rename.up.sql new file mode 100644 index 00000000000..d8b41955b4c --- /dev/null +++ b/pkg/sqlite/migrations/65_movie_group_rename.up.sql @@ -0,0 +1,24 @@ +ALTER TABLE `movies` RENAME TO `groups`; +ALTER TABLE `groups` RENAME COLUMN `synopsis` TO `description`; + +DROP INDEX `index_movies_on_name`; +CREATE INDEX `index_groups_on_name` ON `groups`(`name`); +DROP INDEX `index_movies_on_studio_id`; +CREATE INDEX `index_groups_on_studio_id` on `groups` (`studio_id`); + +ALTER TABLE `movie_urls` RENAME TO `group_urls`; +ALTER TABLE `group_urls` RENAME COLUMN `movie_id` TO `group_id`; + +DROP INDEX `movie_urls_url`; +CREATE INDEX `group_urls_url` on `group_urls` (`url`); + +ALTER TABLE `movies_tags` RENAME TO `groups_tags`; +ALTER TABLE `groups_tags` RENAME COLUMN `movie_id` TO `group_id`; + +DROP INDEX `index_movies_tags_on_tag_id`; +CREATE INDEX `index_groups_tags_on_tag_id` on `groups_tags` (`tag_id`); +DROP INDEX 
`index_movies_tags_on_movie_id`; +CREATE INDEX `index_groups_tags_on_movie_id` on `groups_tags` (`group_id`); + +ALTER TABLE `movies_scenes` RENAME TO `groups_scenes`; +ALTER TABLE `groups_scenes` RENAME COLUMN `movie_id` TO `group_id`; diff --git a/pkg/sqlite/migrations/66_gallery_cover.up.sql b/pkg/sqlite/migrations/66_gallery_cover.up.sql new file mode 100644 index 00000000000..7be80293a7d --- /dev/null +++ b/pkg/sqlite/migrations/66_gallery_cover.up.sql @@ -0,0 +1,2 @@ +ALTER TABLE `galleries_images` ADD COLUMN `cover` BOOLEAN NOT NULL DEFAULT 0; +CREATE UNIQUE INDEX `index_galleries_images_gallery_id_cover` on `galleries_images` (`gallery_id`, `cover`) WHERE `cover` = 1; \ No newline at end of file diff --git a/pkg/sqlite/migrations/67_group_relationships.up.sql b/pkg/sqlite/migrations/67_group_relationships.up.sql new file mode 100644 index 00000000000..76ac29cc83f --- /dev/null +++ b/pkg/sqlite/migrations/67_group_relationships.up.sql @@ -0,0 +1,13 @@ +CREATE TABLE `groups_relations` ( + `containing_id` integer not null, + `sub_id` integer not null, + `order_index` integer not null, + `description` varchar(255), + primary key (`containing_id`, `sub_id`), + foreign key (`containing_id`) references `groups`(`id`) on delete cascade, + foreign key (`sub_id`) references `groups`(`id`) on delete cascade, + check (`containing_id` != `sub_id`) +); + +CREATE INDEX `index_groups_relations_sub_id` ON `groups_relations` (`sub_id`); +CREATE UNIQUE INDEX `index_groups_relations_order_index_unique` ON `groups_relations` (`containing_id`, `order_index`); diff --git a/pkg/sqlite/movies.go b/pkg/sqlite/movies.go deleted file mode 100644 index 0d7c429d0da..00000000000 --- a/pkg/sqlite/movies.go +++ /dev/null @@ -1,608 +0,0 @@ -package sqlite - -import ( - "context" - "database/sql" - "errors" - "fmt" - - "github.com/doug-martin/goqu/v9" - "github.com/doug-martin/goqu/v9/exp" - "github.com/jmoiron/sqlx" - "gopkg.in/guregu/null.v4" - "gopkg.in/guregu/null.v4/zero" - - 
"github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/sliceutil" -) - -const ( - movieTable = "movies" - movieIDColumn = "movie_id" - - movieFrontImageBlobColumn = "front_image_blob" - movieBackImageBlobColumn = "back_image_blob" -) - -type movieRow struct { - ID int `db:"id" goqu:"skipinsert"` - Name zero.String `db:"name"` - Aliases zero.String `db:"aliases"` - Duration null.Int `db:"duration"` - Date NullDate `db:"date"` - // expressed as 1-100 - Rating null.Int `db:"rating"` - StudioID null.Int `db:"studio_id,omitempty"` - Director zero.String `db:"director"` - Synopsis zero.String `db:"synopsis"` - URL zero.String `db:"url"` - CreatedAt Timestamp `db:"created_at"` - UpdatedAt Timestamp `db:"updated_at"` - - // not used in resolutions or updates - FrontImageBlob zero.String `db:"front_image_blob"` - BackImageBlob zero.String `db:"back_image_blob"` -} - -func (r *movieRow) fromMovie(o models.Movie) { - r.ID = o.ID - r.Name = zero.StringFrom(o.Name) - r.Aliases = zero.StringFrom(o.Aliases) - r.Duration = intFromPtr(o.Duration) - r.Date = NullDateFromDatePtr(o.Date) - r.Rating = intFromPtr(o.Rating) - r.StudioID = intFromPtr(o.StudioID) - r.Director = zero.StringFrom(o.Director) - r.Synopsis = zero.StringFrom(o.Synopsis) - r.URL = zero.StringFrom(o.URL) - r.CreatedAt = Timestamp{Timestamp: o.CreatedAt} - r.UpdatedAt = Timestamp{Timestamp: o.UpdatedAt} -} - -func (r *movieRow) resolve() *models.Movie { - ret := &models.Movie{ - ID: r.ID, - Name: r.Name.String, - Aliases: r.Aliases.String, - Duration: nullIntPtr(r.Duration), - Date: r.Date.DatePtr(), - Rating: nullIntPtr(r.Rating), - StudioID: nullIntPtr(r.StudioID), - Director: r.Director.String, - Synopsis: r.Synopsis.String, - URL: r.URL.String, - CreatedAt: r.CreatedAt.Timestamp, - UpdatedAt: r.UpdatedAt.Timestamp, - } - - return ret -} - -type movieRowRecord struct { - updateRecord -} - -func (r *movieRowRecord) fromPartial(o models.MoviePartial) { - r.setNullString("name", o.Name) - 
r.setNullString("aliases", o.Aliases) - r.setNullInt("duration", o.Duration) - r.setNullDate("date", o.Date) - r.setNullInt("rating", o.Rating) - r.setNullInt("studio_id", o.StudioID) - r.setNullString("director", o.Director) - r.setNullString("synopsis", o.Synopsis) - r.setNullString("url", o.URL) - r.setTimestamp("created_at", o.CreatedAt) - r.setTimestamp("updated_at", o.UpdatedAt) -} - -type MovieStore struct { - repository - blobJoinQueryBuilder - - tableMgr *table -} - -func NewMovieStore(blobStore *BlobStore) *MovieStore { - return &MovieStore{ - repository: repository{ - tableName: movieTable, - idColumn: idColumn, - }, - blobJoinQueryBuilder: blobJoinQueryBuilder{ - blobStore: blobStore, - joinTable: movieTable, - }, - - tableMgr: movieTableMgr, - } -} - -func (qb *MovieStore) table() exp.IdentifierExpression { - return qb.tableMgr.table -} - -func (qb *MovieStore) selectDataset() *goqu.SelectDataset { - return dialect.From(qb.table()).Select(qb.table().All()) -} - -func (qb *MovieStore) Create(ctx context.Context, newObject *models.Movie) error { - var r movieRow - r.fromMovie(*newObject) - - id, err := qb.tableMgr.insertID(ctx, r) - if err != nil { - return err - } - - updated, err := qb.find(ctx, id) - if err != nil { - return fmt.Errorf("finding after create: %w", err) - } - - *newObject = *updated - - return nil -} - -func (qb *MovieStore) UpdatePartial(ctx context.Context, id int, partial models.MoviePartial) (*models.Movie, error) { - r := movieRowRecord{ - updateRecord{ - Record: make(exp.Record), - }, - } - - r.fromPartial(partial) - - if len(r.Record) > 0 { - if err := qb.tableMgr.updateByID(ctx, id, r.Record); err != nil { - return nil, err - } - } - - return qb.find(ctx, id) -} - -func (qb *MovieStore) Update(ctx context.Context, updatedObject *models.Movie) error { - var r movieRow - r.fromMovie(*updatedObject) - - if err := qb.tableMgr.updateByID(ctx, updatedObject.ID, r); err != nil { - return err - } - - return nil -} - -func (qb 
*MovieStore) Destroy(ctx context.Context, id int) error { - // must handle image checksums manually - if err := qb.destroyImages(ctx, id); err != nil { - return err - } - - return qb.destroyExisting(ctx, []int{id}) -} - -// returns nil, nil if not found -func (qb *MovieStore) Find(ctx context.Context, id int) (*models.Movie, error) { - ret, err := qb.find(ctx, id) - if errors.Is(err, sql.ErrNoRows) { - return nil, nil - } - return ret, err -} - -func (qb *MovieStore) FindMany(ctx context.Context, ids []int) ([]*models.Movie, error) { - ret := make([]*models.Movie, len(ids)) - - table := qb.table() - if err := batchExec(ids, defaultBatchSize, func(batch []int) error { - q := qb.selectDataset().Prepared(true).Where(table.Col(idColumn).In(batch)) - unsorted, err := qb.getMany(ctx, q) - if err != nil { - return err - } - - for _, s := range unsorted { - i := sliceutil.Index(ids, s.ID) - ret[i] = s - } - - return nil - }); err != nil { - return nil, err - } - - for i := range ret { - if ret[i] == nil { - return nil, fmt.Errorf("movie with id %d not found", ids[i]) - } - } - - return ret, nil -} - -// returns nil, sql.ErrNoRows if not found -func (qb *MovieStore) find(ctx context.Context, id int) (*models.Movie, error) { - q := qb.selectDataset().Where(qb.tableMgr.byID(id)) - - ret, err := qb.get(ctx, q) - if err != nil { - return nil, err - } - - return ret, nil -} - -// returns nil, sql.ErrNoRows if not found -func (qb *MovieStore) get(ctx context.Context, q *goqu.SelectDataset) (*models.Movie, error) { - ret, err := qb.getMany(ctx, q) - if err != nil { - return nil, err - } - - if len(ret) == 0 { - return nil, sql.ErrNoRows - } - - return ret[0], nil -} - -func (qb *MovieStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]*models.Movie, error) { - const single = false - var ret []*models.Movie - if err := queryFunc(ctx, q, single, func(r *sqlx.Rows) error { - var f movieRow - if err := r.StructScan(&f); err != nil { - return err - } - - s := f.resolve() - - 
ret = append(ret, s) - return nil - }); err != nil { - return nil, err - } - - return ret, nil -} - -func (qb *MovieStore) FindByName(ctx context.Context, name string, nocase bool) (*models.Movie, error) { - // query := "SELECT * FROM movies WHERE name = ?" - // if nocase { - // query += " COLLATE NOCASE" - // } - // query += " LIMIT 1" - where := "name = ?" - if nocase { - where += " COLLATE NOCASE" - } - sq := qb.selectDataset().Prepared(true).Where(goqu.L(where, name)).Limit(1) - ret, err := qb.get(ctx, sq) - - if err != nil && !errors.Is(err, sql.ErrNoRows) { - return nil, err - } - - return ret, nil -} - -func (qb *MovieStore) FindByNames(ctx context.Context, names []string, nocase bool) ([]*models.Movie, error) { - // query := "SELECT * FROM movies WHERE name" - // if nocase { - // query += " COLLATE NOCASE" - // } - // query += " IN " + getInBinding(len(names)) - where := "name" - if nocase { - where += " COLLATE NOCASE" - } - where += " IN " + getInBinding(len(names)) - var args []interface{} - for _, name := range names { - args = append(args, name) - } - sq := qb.selectDataset().Prepared(true).Where(goqu.L(where, args...)) - ret, err := qb.getMany(ctx, sq) - - if err != nil { - return nil, err - } - - return ret, nil -} - -func (qb *MovieStore) Count(ctx context.Context) (int, error) { - q := dialect.Select(goqu.COUNT("*")).From(qb.table()) - return count(ctx, q) -} - -func (qb *MovieStore) All(ctx context.Context) ([]*models.Movie, error) { - table := qb.table() - - return qb.getMany(ctx, qb.selectDataset().Order( - table.Col("name").Asc(), - table.Col(idColumn).Asc(), - )) -} - -func (qb *MovieStore) makeFilter(ctx context.Context, movieFilter *models.MovieFilterType) *filterBuilder { - query := &filterBuilder{} - - query.handleCriterion(ctx, stringCriterionHandler(movieFilter.Name, "movies.name")) - query.handleCriterion(ctx, stringCriterionHandler(movieFilter.Director, "movies.director")) - query.handleCriterion(ctx, 
stringCriterionHandler(movieFilter.Synopsis, "movies.synopsis")) - query.handleCriterion(ctx, intCriterionHandler(movieFilter.Rating100, "movies.rating", nil)) - query.handleCriterion(ctx, floatIntCriterionHandler(movieFilter.Duration, "movies.duration", nil)) - query.handleCriterion(ctx, movieIsMissingCriterionHandler(qb, movieFilter.IsMissing)) - query.handleCriterion(ctx, stringCriterionHandler(movieFilter.URL, "movies.url")) - query.handleCriterion(ctx, studioCriterionHandler(movieTable, movieFilter.Studios)) - query.handleCriterion(ctx, moviePerformersCriterionHandler(qb, movieFilter.Performers)) - query.handleCriterion(ctx, dateCriterionHandler(movieFilter.Date, "movies.date")) - query.handleCriterion(ctx, timestampCriterionHandler(movieFilter.CreatedAt, "movies.created_at")) - query.handleCriterion(ctx, timestampCriterionHandler(movieFilter.UpdatedAt, "movies.updated_at")) - - return query -} - -func (qb *MovieStore) makeQuery(ctx context.Context, movieFilter *models.MovieFilterType, findFilter *models.FindFilterType) (*queryBuilder, error) { - if findFilter == nil { - findFilter = &models.FindFilterType{} - } - if movieFilter == nil { - movieFilter = &models.MovieFilterType{} - } - - query := qb.newQuery() - distinctIDs(&query, movieTable) - - if q := findFilter.Q; q != nil && *q != "" { - searchColumns := []string{"movies.name", "movies.aliases"} - query.parseQueryString(searchColumns, *q) - } - - filter := qb.makeFilter(ctx, movieFilter) - - if err := query.addFilter(filter); err != nil { - return nil, err - } - - var err error - query.sortAndPagination, err = qb.getMovieSort(findFilter) - if err != nil { - return nil, err - } - - query.sortAndPagination += getPagination(findFilter) - - return &query, nil -} - -func (qb *MovieStore) Query(ctx context.Context, movieFilter *models.MovieFilterType, findFilter *models.FindFilterType) ([]*models.Movie, int, error) { - query, err := qb.makeQuery(ctx, movieFilter, findFilter) - if err != nil { - return nil, 0, 
err - } - - idsResult, countResult, err := query.executeFind(ctx) - if err != nil { - return nil, 0, err - } - - movies, err := qb.FindMany(ctx, idsResult) - if err != nil { - return nil, 0, err - } - - return movies, countResult, nil -} - -func (qb *MovieStore) QueryCount(ctx context.Context, movieFilter *models.MovieFilterType, findFilter *models.FindFilterType) (int, error) { - query, err := qb.makeQuery(ctx, movieFilter, findFilter) - if err != nil { - return 0, err - } - - return query.executeCount(ctx) -} - -func movieIsMissingCriterionHandler(qb *MovieStore, isMissing *string) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if isMissing != nil && *isMissing != "" { - switch *isMissing { - case "front_image": - f.addWhere("movies.front_image_blob IS NULL") - case "back_image": - f.addWhere("movies.back_image_blob IS NULL") - case "scenes": - f.addLeftJoin("movies_scenes", "", "movies_scenes.movie_id = movies.id") - f.addWhere("movies_scenes.scene_id IS NULL") - default: - f.addWhere("(movies." + *isMissing + " IS NULL OR TRIM(movies." 
+ *isMissing + ") = '')") - } - } - } -} - -func moviePerformersCriterionHandler(qb *MovieStore, performers *models.MultiCriterionInput) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if performers != nil { - if performers.Modifier == models.CriterionModifierIsNull || performers.Modifier == models.CriterionModifierNotNull { - var notClause string - if performers.Modifier == models.CriterionModifierNotNull { - notClause = "NOT" - } - - f.addLeftJoin("movies_scenes", "", "movies.id = movies_scenes.movie_id") - f.addLeftJoin("performers_scenes", "", "movies_scenes.scene_id = performers_scenes.scene_id") - - f.addWhere(fmt.Sprintf("performers_scenes.performer_id IS %s NULL", notClause)) - return - } - - if len(performers.Value) == 0 { - return - } - - var args []interface{} - for _, arg := range performers.Value { - args = append(args, arg) - } - - // Hack, can't apply args to join, nor inner join on a left join, so use CTE instead - f.addWith(`movies_performers AS ( - SELECT movies_scenes.movie_id, performers_scenes.performer_id - FROM movies_scenes - INNER JOIN performers_scenes ON movies_scenes.scene_id = performers_scenes.scene_id - WHERE performers_scenes.performer_id IN`+getInBinding(len(performers.Value))+` - )`, args...) 
- f.addLeftJoin("movies_performers", "", "movies.id = movies_performers.movie_id") - - switch performers.Modifier { - case models.CriterionModifierIncludes: - f.addWhere("movies_performers.performer_id IS NOT NULL") - case models.CriterionModifierIncludesAll: - f.addWhere("movies_performers.performer_id IS NOT NULL") - f.addHaving("COUNT(DISTINCT movies_performers.performer_id) = ?", len(performers.Value)) - case models.CriterionModifierExcludes: - f.addWhere("movies_performers.performer_id IS NULL") - } - } - } -} - -var movieSortOptions = sortOptions{ - "created_at", - "date", - "duration", - "id", - "name", - "random", - "rating", - "scenes_count", - "updated_at", -} - -func (qb *MovieStore) getMovieSort(findFilter *models.FindFilterType) (string, error) { - var sort string - var direction string - if findFilter == nil { - sort = "name" - direction = "ASC" - } else { - sort = findFilter.GetSort("name") - direction = findFilter.GetDirection() - } - - // CVE-2024-32231 - ensure sort is in the list of allowed sorts - if err := movieSortOptions.validateSort(sort); err != nil { - return "", err - } - - sortQuery := "" - switch sort { - case "scenes_count": // generic getSort won't work for this - sortQuery += getCountSort(movieTable, moviesScenesTable, movieIDColumn, direction) - default: - sortQuery += getSort(sort, direction, "movies") - } - - // Whatever the sorting, always use name/id as a final sort - sortQuery += ", COALESCE(movies.name, movies.id) COLLATE NATURAL_CI ASC" - return sortQuery, nil -} - -func (qb *MovieStore) queryMovies(ctx context.Context, query string, args []interface{}) ([]*models.Movie, error) { - const single = false - var ret []*models.Movie - if err := qb.queryFunc(ctx, query, args, single, func(r *sqlx.Rows) error { - var f movieRow - if err := r.StructScan(&f); err != nil { - return err - } - - s := f.resolve() - - ret = append(ret, s) - return nil - }); err != nil { - return nil, err - } - - return ret, nil -} - -func (qb *MovieStore) 
UpdateFrontImage(ctx context.Context, movieID int, frontImage []byte) error { - return qb.UpdateImage(ctx, movieID, movieFrontImageBlobColumn, frontImage) -} - -func (qb *MovieStore) UpdateBackImage(ctx context.Context, movieID int, backImage []byte) error { - return qb.UpdateImage(ctx, movieID, movieBackImageBlobColumn, backImage) -} - -func (qb *MovieStore) destroyImages(ctx context.Context, movieID int) error { - if err := qb.DestroyImage(ctx, movieID, movieFrontImageBlobColumn); err != nil { - return err - } - if err := qb.DestroyImage(ctx, movieID, movieBackImageBlobColumn); err != nil { - return err - } - - return nil -} - -func (qb *MovieStore) GetFrontImage(ctx context.Context, movieID int) ([]byte, error) { - return qb.GetImage(ctx, movieID, movieFrontImageBlobColumn) -} - -func (qb *MovieStore) HasFrontImage(ctx context.Context, movieID int) (bool, error) { - return qb.HasImage(ctx, movieID, movieFrontImageBlobColumn) -} - -func (qb *MovieStore) GetBackImage(ctx context.Context, movieID int) ([]byte, error) { - return qb.GetImage(ctx, movieID, movieBackImageBlobColumn) -} - -func (qb *MovieStore) HasBackImage(ctx context.Context, movieID int) (bool, error) { - return qb.HasImage(ctx, movieID, movieBackImageBlobColumn) -} - -func (qb *MovieStore) FindByPerformerID(ctx context.Context, performerID int) ([]*models.Movie, error) { - query := `SELECT DISTINCT movies.* -FROM movies -INNER JOIN movies_scenes ON movies.id = movies_scenes.movie_id -INNER JOIN performers_scenes ON performers_scenes.scene_id = movies_scenes.scene_id -WHERE performers_scenes.performer_id = ? -` - args := []interface{}{performerID} - return qb.queryMovies(ctx, query, args) -} - -func (qb *MovieStore) CountByPerformerID(ctx context.Context, performerID int) (int, error) { - query := `SELECT COUNT(DISTINCT movies_scenes.movie_id) AS count -FROM movies_scenes -INNER JOIN performers_scenes ON performers_scenes.scene_id = movies_scenes.scene_id -WHERE performers_scenes.performer_id = ? 
-` - args := []interface{}{performerID} - return qb.runCountQuery(ctx, query, args) -} - -func (qb *MovieStore) FindByStudioID(ctx context.Context, studioID int) ([]*models.Movie, error) { - query := `SELECT movies.* -FROM movies -WHERE movies.studio_id = ? -` - args := []interface{}{studioID} - return qb.queryMovies(ctx, query, args) -} - -func (qb *MovieStore) CountByStudioID(ctx context.Context, studioID int) (int, error) { - query := `SELECT COUNT(1) AS count -FROM movies -WHERE movies.studio_id = ? -` - args := []interface{}{studioID} - return qb.runCountQuery(ctx, query, args) -} diff --git a/pkg/sqlite/movies_test.go b/pkg/sqlite/movies_test.go deleted file mode 100644 index 9b9615fbd90..00000000000 --- a/pkg/sqlite/movies_test.go +++ /dev/null @@ -1,332 +0,0 @@ -//go:build integration -// +build integration - -package sqlite_test - -import ( - "context" - "fmt" - "strconv" - "strings" - "testing" - - "github.com/stretchr/testify/assert" - - "github.com/stashapp/stash/pkg/models" -) - -func TestMovieFindByName(t *testing.T) { - withTxn(func(ctx context.Context) error { - mqb := db.Movie - - name := movieNames[movieIdxWithScene] // find a movie by name - - movie, err := mqb.FindByName(ctx, name, false) - - if err != nil { - t.Errorf("Error finding movies: %s", err.Error()) - } - - assert.Equal(t, movieNames[movieIdxWithScene], movie.Name) - - name = movieNames[movieIdxWithDupName] // find a movie by name nocase - - movie, err = mqb.FindByName(ctx, name, true) - - if err != nil { - t.Errorf("Error finding movies: %s", err.Error()) - } - // movieIdxWithDupName and movieIdxWithScene should have similar names ( only diff should be Name vs NaMe) - //movie.Name should match with movieIdxWithScene since its ID is before moveIdxWithDupName - assert.Equal(t, movieNames[movieIdxWithScene], movie.Name) - //movie.Name should match with movieIdxWithDupName if the check is not case sensitive - assert.Equal(t, strings.ToLower(movieNames[movieIdxWithDupName]), 
strings.ToLower(movie.Name)) - - return nil - }) -} - -func TestMovieFindByNames(t *testing.T) { - withTxn(func(ctx context.Context) error { - var names []string - - mqb := db.Movie - - names = append(names, movieNames[movieIdxWithScene]) // find movies by names - - movies, err := mqb.FindByNames(ctx, names, false) - if err != nil { - t.Errorf("Error finding movies: %s", err.Error()) - } - assert.Len(t, movies, 1) - assert.Equal(t, movieNames[movieIdxWithScene], movies[0].Name) - - movies, err = mqb.FindByNames(ctx, names, true) // find movies by names nocase - if err != nil { - t.Errorf("Error finding movies: %s", err.Error()) - } - assert.Len(t, movies, 2) // movieIdxWithScene and movieIdxWithDupName - assert.Equal(t, strings.ToLower(movieNames[movieIdxWithScene]), strings.ToLower(movies[0].Name)) - assert.Equal(t, strings.ToLower(movieNames[movieIdxWithScene]), strings.ToLower(movies[1].Name)) - - return nil - }) -} - -func moviesToIDs(i []*models.Movie) []int { - ret := make([]int, len(i)) - for i, v := range i { - ret[i] = v.ID - } - - return ret -} - -func TestMovieQuery(t *testing.T) { - var ( - frontImage = "front_image" - backImage = "back_image" - ) - - tests := []struct { - name string - findFilter *models.FindFilterType - filter *models.MovieFilterType - includeIdxs []int - excludeIdxs []int - wantErr bool - }{ - { - "is missing front image", - nil, - &models.MovieFilterType{ - IsMissing: &frontImage, - }, - // just ensure that it doesn't error - nil, - nil, - false, - }, - { - "is missing back image", - nil, - &models.MovieFilterType{ - IsMissing: &backImage, - }, - // just ensure that it doesn't error - nil, - nil, - false, - }, - } - - for _, tt := range tests { - runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { - assert := assert.New(t) - - results, _, err := db.Movie.Query(ctx, tt.filter, tt.findFilter) - if (err != nil) != tt.wantErr { - t.Errorf("MovieQueryBuilder.Query() error = %v, wantErr %v", err, tt.wantErr) - return 
- } - - ids := moviesToIDs(results) - include := indexesToIDs(performerIDs, tt.includeIdxs) - exclude := indexesToIDs(performerIDs, tt.excludeIdxs) - - for _, i := range include { - assert.Contains(ids, i) - } - for _, e := range exclude { - assert.NotContains(ids, e) - } - }) - } -} - -func TestMovieQueryStudio(t *testing.T) { - withTxn(func(ctx context.Context) error { - mqb := db.Movie - studioCriterion := models.HierarchicalMultiCriterionInput{ - Value: []string{ - strconv.Itoa(studioIDs[studioIdxWithMovie]), - }, - Modifier: models.CriterionModifierIncludes, - } - - movieFilter := models.MovieFilterType{ - Studios: &studioCriterion, - } - - movies, _, err := mqb.Query(ctx, &movieFilter, nil) - if err != nil { - t.Errorf("Error querying movie: %s", err.Error()) - } - - assert.Len(t, movies, 1) - - // ensure id is correct - assert.Equal(t, movieIDs[movieIdxWithStudio], movies[0].ID) - - studioCriterion = models.HierarchicalMultiCriterionInput{ - Value: []string{ - strconv.Itoa(studioIDs[studioIdxWithMovie]), - }, - Modifier: models.CriterionModifierExcludes, - } - - q := getMovieStringValue(movieIdxWithStudio, titleField) - findFilter := models.FindFilterType{ - Q: &q, - } - - movies, _, err = mqb.Query(ctx, &movieFilter, &findFilter) - if err != nil { - t.Errorf("Error querying movie: %s", err.Error()) - } - assert.Len(t, movies, 0) - - return nil - }) -} - -func TestMovieQueryURL(t *testing.T) { - const sceneIdx = 1 - movieURL := getMovieStringValue(sceneIdx, urlField) - - urlCriterion := models.StringCriterionInput{ - Value: movieURL, - Modifier: models.CriterionModifierEquals, - } - - filter := models.MovieFilterType{ - URL: &urlCriterion, - } - - verifyFn := func(n *models.Movie) { - t.Helper() - verifyString(t, n.URL, urlCriterion) - } - - verifyMovieQuery(t, filter, verifyFn) - - urlCriterion.Modifier = models.CriterionModifierNotEquals - verifyMovieQuery(t, filter, verifyFn) - - urlCriterion.Modifier = models.CriterionModifierMatchesRegex - 
urlCriterion.Value = "movie_.*1_URL" - verifyMovieQuery(t, filter, verifyFn) - - urlCriterion.Modifier = models.CriterionModifierNotMatchesRegex - verifyMovieQuery(t, filter, verifyFn) - - urlCriterion.Modifier = models.CriterionModifierIsNull - urlCriterion.Value = "" - verifyMovieQuery(t, filter, verifyFn) - - urlCriterion.Modifier = models.CriterionModifierNotNull - verifyMovieQuery(t, filter, verifyFn) -} - -func verifyMovieQuery(t *testing.T, filter models.MovieFilterType, verifyFn func(s *models.Movie)) { - withTxn(func(ctx context.Context) error { - t.Helper() - sqb := db.Movie - - movies := queryMovie(ctx, t, sqb, &filter, nil) - - // assume it should find at least one - assert.Greater(t, len(movies), 0) - - for _, m := range movies { - verifyFn(m) - } - - return nil - }) -} - -func queryMovie(ctx context.Context, t *testing.T, sqb models.MovieReader, movieFilter *models.MovieFilterType, findFilter *models.FindFilterType) []*models.Movie { - movies, _, err := sqb.Query(ctx, movieFilter, findFilter) - if err != nil { - t.Errorf("Error querying movie: %s", err.Error()) - } - - return movies -} - -func TestMovieQuerySorting(t *testing.T) { - sort := "scenes_count" - direction := models.SortDirectionEnumDesc - findFilter := models.FindFilterType{ - Sort: &sort, - Direction: &direction, - } - - withTxn(func(ctx context.Context) error { - sqb := db.Movie - movies := queryMovie(ctx, t, sqb, nil, &findFilter) - - // scenes should be in same order as indexes - firstMovie := movies[0] - - assert.Equal(t, movieIDs[movieIdxWithScene], firstMovie.ID) - - // sort in descending order - direction = models.SortDirectionEnumAsc - - movies = queryMovie(ctx, t, sqb, nil, &findFilter) - lastMovie := movies[len(movies)-1] - - assert.Equal(t, movieIDs[movieIdxWithScene], lastMovie.ID) - - return nil - }) -} - -func TestMovieUpdateFrontImage(t *testing.T) { - if err := withRollbackTxn(func(ctx context.Context) error { - qb := db.Movie - - // create movie to test against - const 
name = "TestMovieUpdateMovieImages" - movie := models.Movie{ - Name: name, - } - err := qb.Create(ctx, &movie) - if err != nil { - return fmt.Errorf("Error creating movie: %s", err.Error()) - } - - return testUpdateImage(t, ctx, movie.ID, qb.UpdateFrontImage, qb.GetFrontImage) - }); err != nil { - t.Error(err.Error()) - } -} - -func TestMovieUpdateBackImage(t *testing.T) { - if err := withRollbackTxn(func(ctx context.Context) error { - qb := db.Movie - - // create movie to test against - const name = "TestMovieUpdateMovieImages" - movie := models.Movie{ - Name: name, - } - err := qb.Create(ctx, &movie) - if err != nil { - return fmt.Errorf("Error creating movie: %s", err.Error()) - } - - return testUpdateImage(t, ctx, movie.ID, qb.UpdateBackImage, qb.GetBackImage) - }); err != nil { - t.Error(err.Error()) - } -} - -// TODO Update -// TODO Destroy - ensure image is destroyed -// TODO Find -// TODO Count -// TODO All -// TODO Query diff --git a/pkg/sqlite/performer.go b/pkg/sqlite/performer.go index dcdc92f0f0e..7ff6f5401a0 100644 --- a/pkg/sqlite/performer.go +++ b/pkg/sqlite/performer.go @@ -5,8 +5,6 @@ import ( "database/sql" "errors" "fmt" - "strconv" - "strings" "github.com/doug-martin/goqu/v9" "github.com/doug-martin/goqu/v9/exp" @@ -25,6 +23,9 @@ const ( performerAliasColumn = "alias" performersTagsTable = "performers_tags" + performerURLsTable = "performer_urls" + performerURLColumn = "url" + performerImageBlobColumn = "image_blob" ) @@ -33,9 +34,6 @@ type performerRow struct { Name null.String `db:"name"` // TODO: make schema non-nullable Disambigation zero.String `db:"disambiguation"` Gender zero.String `db:"gender"` - URL zero.String `db:"url"` - Twitter zero.String `db:"twitter"` - Instagram zero.String `db:"instagram"` Birthdate NullDate `db:"birthdate"` Ethnicity zero.String `db:"ethnicity"` Country zero.String `db:"country"` @@ -70,9 +68,6 @@ func (r *performerRow) fromPerformer(o models.Performer) { if o.Gender != nil && o.Gender.IsValid() { r.Gender 
= zero.StringFrom(o.Gender.String()) } - r.URL = zero.StringFrom(o.URL) - r.Twitter = zero.StringFrom(o.Twitter) - r.Instagram = zero.StringFrom(o.Instagram) r.Birthdate = NullDateFromDatePtr(o.Birthdate) r.Ethnicity = zero.StringFrom(o.Ethnicity) r.Country = zero.StringFrom(o.Country) @@ -103,9 +98,6 @@ func (r *performerRow) resolve() *models.Performer { ID: r.ID, Name: r.Name.String, Disambiguation: r.Disambigation.String, - URL: r.URL.String, - Twitter: r.Twitter.String, - Instagram: r.Instagram.String, Birthdate: r.Birthdate.DatePtr(), Ethnicity: r.Ethnicity.String, Country: r.Country.String, @@ -150,9 +142,6 @@ func (r *performerRowRecord) fromPartial(o models.PerformerPartial) { r.setString("name", o.Name) r.setNullString("disambiguation", o.Disambiguation) r.setNullString("gender", o.Gender) - r.setNullString("url", o.URL) - r.setNullString("twitter", o.Twitter) - r.setNullString("instagram", o.Instagram) r.setNullDate("birthdate", o.Birthdate) r.setNullString("ethnicity", o.Ethnicity) r.setNullString("country", o.Country) @@ -176,19 +165,73 @@ func (r *performerRowRecord) fromPartial(o models.PerformerPartial) { r.setBool("ignore_auto_tag", o.IgnoreAutoTag) } -type PerformerStore struct { +type performerRepositoryType struct { repository - blobJoinQueryBuilder - tableMgr *table + tags joinRepository + stashIDs stashIDRepository + + scenes joinRepository + images joinRepository + galleries joinRepository } -func NewPerformerStore(blobStore *BlobStore) *PerformerStore { - return &PerformerStore{ +var ( + performerRepository = performerRepositoryType{ repository: repository{ tableName: performerTable, idColumn: idColumn, }, + tags: joinRepository{ + repository: repository{ + tableName: performersTagsTable, + idColumn: performerIDColumn, + }, + fkColumn: tagIDColumn, + foreignTable: tagTable, + orderBy: "tags.name ASC", + }, + stashIDs: stashIDRepository{ + repository{ + tableName: "performer_stash_ids", + idColumn: performerIDColumn, + }, + }, + scenes: 
joinRepository{ + repository: repository{ + tableName: performersScenesTable, + idColumn: performerIDColumn, + }, + fkColumn: sceneIDColumn, + foreignTable: sceneTable, + }, + images: joinRepository{ + repository: repository{ + tableName: performersImagesTable, + idColumn: performerIDColumn, + }, + fkColumn: imageIDColumn, + foreignTable: imageTable, + }, + galleries: joinRepository{ + repository: repository{ + tableName: performersGalleriesTable, + idColumn: performerIDColumn, + }, + fkColumn: galleryIDColumn, + foreignTable: galleryTable, + }, + } +) + +type PerformerStore struct { + blobJoinQueryBuilder + + tableMgr *table +} + +func NewPerformerStore(blobStore *BlobStore) *PerformerStore { + return &PerformerStore{ blobJoinQueryBuilder: blobJoinQueryBuilder{ blobStore: blobStore, joinTable: performerTable, @@ -220,6 +263,13 @@ func (qb *PerformerStore) Create(ctx context.Context, newObject *models.Performe } } + if newObject.URLs.Loaded() { + const startPos = 0 + if err := performersURLsTableMgr.insertJoins(ctx, id, startPos, newObject.URLs.List()); err != nil { + return err + } + } + if newObject.TagIDs.Loaded() { if err := performersTagsTableMgr.insertJoins(ctx, id, newObject.TagIDs.List()); err != nil { return err @@ -263,6 +313,12 @@ func (qb *PerformerStore) UpdatePartial(ctx context.Context, id int, partial mod } } + if partial.URLs != nil { + if err := performersURLsTableMgr.modifyJoins(ctx, id, partial.URLs.Values, partial.URLs.Mode); err != nil { + return nil, err + } + } + if partial.TagIDs != nil { if err := performersTagsTableMgr.modifyJoins(ctx, id, partial.TagIDs.IDs, partial.TagIDs.Mode); err != nil { return nil, err @@ -291,6 +347,12 @@ func (qb *PerformerStore) Update(ctx context.Context, updatedObject *models.Perf } } + if updatedObject.URLs.Loaded() { + if err := performersURLsTableMgr.replaceJoins(ctx, updatedObject.ID, updatedObject.URLs.List()); err != nil { + return err + } + } + if updatedObject.TagIDs.Loaded() { if err := 
performersTagsTableMgr.replaceJoins(ctx, updatedObject.ID, updatedObject.TagIDs.List()); err != nil { return err @@ -312,7 +374,7 @@ func (qb *PerformerStore) Destroy(ctx context.Context, id int) error { return err } - return qb.destroyExisting(ctx, []int{id}) + return performerRepository.destroyExisting(ctx, []int{id}) } // returns nil, nil if not found @@ -525,161 +587,6 @@ func (qb *PerformerStore) QueryForAutoTag(ctx context.Context, words []string) ( return ret, nil } -func (qb *PerformerStore) validateFilter(filter *models.PerformerFilterType) error { - const and = "AND" - const or = "OR" - const not = "NOT" - - if filter.And != nil { - if filter.Or != nil { - return illegalFilterCombination(and, or) - } - if filter.Not != nil { - return illegalFilterCombination(and, not) - } - - return qb.validateFilter(filter.And) - } - - if filter.Or != nil { - if filter.Not != nil { - return illegalFilterCombination(or, not) - } - - return qb.validateFilter(filter.Or) - } - - if filter.Not != nil { - return qb.validateFilter(filter.Not) - } - - // if legacy height filter used, ensure only supported modifiers are used - if filter.Height != nil { - // treat as an int filter - intCrit := &models.IntCriterionInput{ - Modifier: filter.Height.Modifier, - } - if !intCrit.ValidModifier() { - return fmt.Errorf("invalid height modifier: %s", filter.Height.Modifier) - } - - // ensure value is a valid number - if _, err := strconv.Atoi(filter.Height.Value); err != nil { - return fmt.Errorf("invalid height value: %s", filter.Height.Value) - } - } - - return nil -} - -func (qb *PerformerStore) makeFilter(ctx context.Context, filter *models.PerformerFilterType) *filterBuilder { - query := &filterBuilder{} - - if filter.And != nil { - query.and(qb.makeFilter(ctx, filter.And)) - } - if filter.Or != nil { - query.or(qb.makeFilter(ctx, filter.Or)) - } - if filter.Not != nil { - query.not(qb.makeFilter(ctx, filter.Not)) - } - - const tableName = performerTable - query.handleCriterion(ctx, 
stringCriterionHandler(filter.Name, tableName+".name")) - query.handleCriterion(ctx, stringCriterionHandler(filter.Disambiguation, tableName+".disambiguation")) - query.handleCriterion(ctx, stringCriterionHandler(filter.Details, tableName+".details")) - - query.handleCriterion(ctx, boolCriterionHandler(filter.FilterFavorites, tableName+".favorite", nil)) - query.handleCriterion(ctx, boolCriterionHandler(filter.IgnoreAutoTag, tableName+".ignore_auto_tag", nil)) - - query.handleCriterion(ctx, yearFilterCriterionHandler(filter.BirthYear, tableName+".birthdate")) - query.handleCriterion(ctx, yearFilterCriterionHandler(filter.DeathYear, tableName+".death_date")) - - query.handleCriterion(ctx, performerAgeFilterCriterionHandler(filter.Age)) - - query.handleCriterion(ctx, criterionHandlerFunc(func(ctx context.Context, f *filterBuilder) { - if gender := filter.Gender; gender != nil { - genderCopy := *gender - if genderCopy.Value.IsValid() && len(genderCopy.ValueList) == 0 { - genderCopy.ValueList = []models.GenderEnum{genderCopy.Value} - } - - v := utils.StringerSliceToStringSlice(genderCopy.ValueList) - enumCriterionHandler(genderCopy.Modifier, v, tableName+".gender")(ctx, f) - } - })) - - query.handleCriterion(ctx, performerIsMissingCriterionHandler(qb, filter.IsMissing)) - query.handleCriterion(ctx, stringCriterionHandler(filter.Ethnicity, tableName+".ethnicity")) - query.handleCriterion(ctx, stringCriterionHandler(filter.Country, tableName+".country")) - query.handleCriterion(ctx, stringCriterionHandler(filter.EyeColor, tableName+".eye_color")) - - // special handler for legacy height filter - heightCmCrit := filter.HeightCm - if heightCmCrit == nil && filter.Height != nil { - heightCm, _ := strconv.Atoi(filter.Height.Value) // already validated - heightCmCrit = &models.IntCriterionInput{ - Value: heightCm, - Modifier: filter.Height.Modifier, - } - } - - query.handleCriterion(ctx, intCriterionHandler(heightCmCrit, tableName+".height", nil)) - - 
query.handleCriterion(ctx, stringCriterionHandler(filter.Measurements, tableName+".measurements")) - query.handleCriterion(ctx, stringCriterionHandler(filter.FakeTits, tableName+".fake_tits")) - query.handleCriterion(ctx, floatCriterionHandler(filter.PenisLength, tableName+".penis_length", nil)) - - query.handleCriterion(ctx, criterionHandlerFunc(func(ctx context.Context, f *filterBuilder) { - if circumcised := filter.Circumcised; circumcised != nil { - v := utils.StringerSliceToStringSlice(circumcised.Value) - enumCriterionHandler(circumcised.Modifier, v, tableName+".circumcised")(ctx, f) - } - })) - - query.handleCriterion(ctx, stringCriterionHandler(filter.CareerLength, tableName+".career_length")) - query.handleCriterion(ctx, stringCriterionHandler(filter.Tattoos, tableName+".tattoos")) - query.handleCriterion(ctx, stringCriterionHandler(filter.Piercings, tableName+".piercings")) - query.handleCriterion(ctx, intCriterionHandler(filter.Rating100, tableName+".rating", nil)) - query.handleCriterion(ctx, stringCriterionHandler(filter.HairColor, tableName+".hair_color")) - query.handleCriterion(ctx, stringCriterionHandler(filter.URL, tableName+".url")) - query.handleCriterion(ctx, intCriterionHandler(filter.Weight, tableName+".weight", nil)) - query.handleCriterion(ctx, criterionHandlerFunc(func(ctx context.Context, f *filterBuilder) { - if filter.StashID != nil { - qb.stashIDRepository().join(f, "performer_stash_ids", "performers.id") - stringCriterionHandler(filter.StashID, "performer_stash_ids.stash_id")(ctx, f) - } - })) - query.handleCriterion(ctx, &stashIDCriterionHandler{ - c: filter.StashIDEndpoint, - stashIDRepository: qb.stashIDRepository(), - stashIDTableAs: "performer_stash_ids", - parentIDCol: "performers.id", - }) - - query.handleCriterion(ctx, performerAliasCriterionHandler(qb, filter.Aliases)) - - query.handleCriterion(ctx, performerTagsCriterionHandler(qb, filter.Tags)) - - query.handleCriterion(ctx, performerStudiosCriterionHandler(qb, 
filter.Studios)) - - query.handleCriterion(ctx, performerAppearsWithCriterionHandler(qb, filter.Performers)) - - query.handleCriterion(ctx, performerTagCountCriterionHandler(qb, filter.TagCount)) - query.handleCriterion(ctx, performerSceneCountCriterionHandler(qb, filter.SceneCount)) - query.handleCriterion(ctx, performerImageCountCriterionHandler(qb, filter.ImageCount)) - query.handleCriterion(ctx, performerGalleryCountCriterionHandler(qb, filter.GalleryCount)) - query.handleCriterion(ctx, performerPlayCounterCriterionHandler(qb, filter.PlayCount)) - query.handleCriterion(ctx, performerOCounterCriterionHandler(qb, filter.OCounter)) - query.handleCriterion(ctx, dateCriterionHandler(filter.Birthdate, tableName+".birthdate")) - query.handleCriterion(ctx, dateCriterionHandler(filter.DeathDate, tableName+".death_date")) - query.handleCriterion(ctx, timestampCriterionHandler(filter.CreatedAt, tableName+".created_at")) - query.handleCriterion(ctx, timestampCriterionHandler(filter.UpdatedAt, tableName+".updated_at")) - - return query -} - func (qb *PerformerStore) makeQuery(ctx context.Context, performerFilter *models.PerformerFilterType, findFilter *models.FindFilterType) (*queryBuilder, error) { if performerFilter == nil { performerFilter = &models.PerformerFilterType{} @@ -688,7 +595,7 @@ func (qb *PerformerStore) makeQuery(ctx context.Context, performerFilter *models findFilter = &models.FindFilterType{} } - query := qb.newQuery() + query := performerRepository.newQuery() distinctIDs(&query, performerTable) if q := findFilter.Q; q != nil && *q != "" { @@ -697,10 +604,9 @@ func (qb *PerformerStore) makeQuery(ctx context.Context, performerFilter *models query.parseQueryString(searchColumns, *q) } - if err := qb.validateFilter(performerFilter); err != nil { - return nil, err - } - filter := qb.makeFilter(ctx, performerFilter) + filter := filterBuilderFromHandler(ctx, &performerFilterHandler{ + performerFilter: performerFilter, + }) if err := query.addFilter(filter); err 
!= nil { return nil, err @@ -744,165 +650,16 @@ func (qb *PerformerStore) QueryCount(ctx context.Context, performerFilter *model return query.executeCount(ctx) } -// TODO - we need to provide a whitelist of possible values -func performerIsMissingCriterionHandler(qb *PerformerStore, isMissing *string) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if isMissing != nil && *isMissing != "" { - switch *isMissing { - case "scenes": // Deprecated: use `scene_count == 0` filter instead - f.addLeftJoin(performersScenesTable, "scenes_join", "scenes_join.performer_id = performers.id") - f.addWhere("scenes_join.scene_id IS NULL") - case "image": - f.addWhere("performers.image_blob IS NULL") - case "stash_id": - performersStashIDsTableMgr.join(f, "performer_stash_ids", "performers.id") - f.addWhere("performer_stash_ids.performer_id IS NULL") - case "aliases": - performersAliasesTableMgr.join(f, "", "performers.id") - f.addWhere("performer_aliases.alias IS NULL") - default: - f.addWhere("(performers." + *isMissing + " IS NULL OR TRIM(performers." + *isMissing + ") = '')") - } - } - } -} - -func yearFilterCriterionHandler(year *models.IntCriterionInput, col string) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if year != nil && year.Modifier.IsValid() { - clause, args := getIntCriterionWhereClause("cast(strftime('%Y', "+col+") as int)", *year) - f.addWhere(clause, args...) - } - } -} - -func performerAgeFilterCriterionHandler(age *models.IntCriterionInput) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if age != nil && age.Modifier.IsValid() { - clause, args := getIntCriterionWhereClause( - "cast(IFNULL(strftime('%Y.%m%d', performers.death_date), strftime('%Y.%m%d', 'now')) - strftime('%Y.%m%d', performers.birthdate) as int)", - *age, - ) - f.addWhere(clause, args...) 
- } - } -} - -func performerAliasCriterionHandler(qb *PerformerStore, alias *models.StringCriterionInput) criterionHandlerFunc { - h := stringListCriterionHandlerBuilder{ - joinTable: performersAliasesTable, - stringColumn: performerAliasColumn, - addJoinTable: func(f *filterBuilder) { - performersAliasesTableMgr.join(f, "", "performers.id") - }, - } - - return h.handler(alias) -} - -func performerTagsCriterionHandler(qb *PerformerStore, tags *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { - h := joinedHierarchicalMultiCriterionHandlerBuilder{ - tx: qb.tx, - - primaryTable: performerTable, - foreignTable: tagTable, - foreignFK: "tag_id", - - relationsTable: "tags_relations", - joinAs: "image_tag", - joinTable: performersTagsTable, - primaryFK: performerIDColumn, - } - - return h.handler(tags) -} - -func performerTagCountCriterionHandler(qb *PerformerStore, count *models.IntCriterionInput) criterionHandlerFunc { - h := countCriterionHandlerBuilder{ - primaryTable: performerTable, - joinTable: performersTagsTable, - primaryFK: performerIDColumn, - } - - return h.handler(count) -} - -func performerSceneCountCriterionHandler(qb *PerformerStore, count *models.IntCriterionInput) criterionHandlerFunc { - h := countCriterionHandlerBuilder{ - primaryTable: performerTable, - joinTable: performersScenesTable, - primaryFK: performerIDColumn, - } - - return h.handler(count) -} - -func performerImageCountCriterionHandler(qb *PerformerStore, count *models.IntCriterionInput) criterionHandlerFunc { - h := countCriterionHandlerBuilder{ - primaryTable: performerTable, - joinTable: performersImagesTable, - primaryFK: performerIDColumn, - } - - return h.handler(count) +func (qb *PerformerStore) sortByOCounter(direction string) string { + // need to sum the o_counter from scenes and images + return " ORDER BY (" + selectPerformerOCountSQL + ") " + direction } -func performerGalleryCountCriterionHandler(qb *PerformerStore, count *models.IntCriterionInput) 
criterionHandlerFunc { - h := countCriterionHandlerBuilder{ - primaryTable: performerTable, - joinTable: performersGalleriesTable, - primaryFK: performerIDColumn, - } - - return h.handler(count) +func (qb *PerformerStore) sortByPlayCount(direction string) string { + // need to sum the o_counter from scenes and images + return " ORDER BY (" + selectPerformerPlayCountSQL + ") " + direction } -// used for sorting and filtering on performer o-count -var selectPerformerOCountSQL = utils.StrFormat( - "SELECT SUM(o_counter) "+ - "FROM ("+ - "SELECT SUM(o_counter) as o_counter from {performers_images} s "+ - "LEFT JOIN {images} ON {images}.id = s.{images_id} "+ - "WHERE s.{performer_id} = {performers}.id "+ - "UNION ALL "+ - "SELECT COUNT({scenes_o_dates}.{o_date}) as o_counter from {performers_scenes} s "+ - "LEFT JOIN {scenes} ON {scenes}.id = s.{scene_id} "+ - "LEFT JOIN {scenes_o_dates} ON {scenes_o_dates}.{scene_id} = {scenes}.id "+ - "WHERE s.{performer_id} = {performers}.id "+ - ")", - map[string]interface{}{ - "performers_images": performersImagesTable, - "images": imageTable, - "performer_id": performerIDColumn, - "images_id": imageIDColumn, - "performers": performerTable, - "performers_scenes": performersScenesTable, - "scenes": sceneTable, - "scene_id": sceneIDColumn, - "scenes_o_dates": scenesODatesTable, - "o_date": sceneODateColumn, - }, -) - -// used for sorting and filtering play count on performer view count -var selectPerformerPlayCountSQL = utils.StrFormat( - "SELECT COUNT(DISTINCT {view_date}) FROM ("+ - "SELECT {view_date} FROM {performers_scenes} s "+ - "LEFT JOIN {scenes} ON {scenes}.id = s.{scene_id} "+ - "LEFT JOIN {scenes_view_dates} ON {scenes_view_dates}.{scene_id} = {scenes}.id "+ - "WHERE s.{performer_id} = {performers}.id"+ - ")", - map[string]interface{}{ - "performer_id": performerIDColumn, - "performers": performerTable, - "performers_scenes": performersScenesTable, - "scenes": sceneTable, - "scene_id": sceneIDColumn, - 
"scenes_view_dates": scenesViewDatesTable, - "view_date": sceneViewDateColumn, - }, -) - // used for sorting on performer last o_date var selectPerformerLastOAtSQL = utils.StrFormat( "SELECT MAX(o_date) FROM ("+ @@ -922,6 +679,11 @@ var selectPerformerLastOAtSQL = utils.StrFormat( }, ) +func (qb *PerformerStore) sortByLastOAt(direction string) string { + // need to get the o_dates from scenes + return " ORDER BY (" + selectPerformerLastOAtSQL + ") " + direction +} + // used for sorting on performer last view_date var selectPerformerLastPlayedAtSQL = utils.StrFormat( "SELECT MAX(view_date) FROM ("+ @@ -941,182 +703,6 @@ var selectPerformerLastPlayedAtSQL = utils.StrFormat( }, ) -func performerOCounterCriterionHandler(qb *PerformerStore, count *models.IntCriterionInput) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if count == nil { - return - } - - lhs := "(" + selectPerformerOCountSQL + ")" - clause, args := getIntCriterionWhereClause(lhs, *count) - - f.addWhere(clause, args...) - } -} - -func performerPlayCounterCriterionHandler(qb *PerformerStore, count *models.IntCriterionInput) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if count == nil { - return - } - - lhs := "(" + selectPerformerPlayCountSQL + ")" - clause, args := getIntCriterionWhereClause(lhs, *count) - - f.addWhere(clause, args...) 
- } -} - -func performerStudiosCriterionHandler(qb *PerformerStore, studios *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if studios != nil { - formatMaps := []utils.StrFormatMap{ - { - "primaryTable": sceneTable, - "joinTable": performersScenesTable, - "primaryFK": sceneIDColumn, - }, - { - "primaryTable": imageTable, - "joinTable": performersImagesTable, - "primaryFK": imageIDColumn, - }, - { - "primaryTable": galleryTable, - "joinTable": performersGalleriesTable, - "primaryFK": galleryIDColumn, - }, - } - - if studios.Modifier == models.CriterionModifierIsNull || studios.Modifier == models.CriterionModifierNotNull { - var notClause string - if studios.Modifier == models.CriterionModifierNotNull { - notClause = "NOT" - } - - var conditions []string - for _, c := range formatMaps { - f.addLeftJoin(c["joinTable"].(string), "", fmt.Sprintf("%s.performer_id = performers.id", c["joinTable"])) - f.addLeftJoin(c["primaryTable"].(string), "", fmt.Sprintf("%s.%s = %s.id", c["joinTable"], c["primaryFK"], c["primaryTable"])) - - conditions = append(conditions, fmt.Sprintf("%s.studio_id IS NULL", c["primaryTable"])) - } - - f.addWhere(fmt.Sprintf("%s (%s)", notClause, strings.Join(conditions, " AND "))) - return - } - - if len(studios.Value) == 0 { - return - } - - var clauseCondition string - - switch studios.Modifier { - case models.CriterionModifierIncludes: - // return performers who appear in scenes/images/galleries with any of the given studios - clauseCondition = "NOT" - case models.CriterionModifierExcludes: - // exclude performers who appear in scenes/images/galleries with any of the given studios - clauseCondition = "" - default: - return - } - - const derivedPerformerStudioTable = "performer_studio" - valuesClause, err := getHierarchicalValues(ctx, qb.tx, studios.Value, studioTable, "", "parent_id", "child_id", studios.Depth) - if err != nil { - f.setError(err) - return - } - 
f.addWith("studio(root_id, item_id) AS (" + valuesClause + ")") - - templStr := `SELECT performer_id FROM {primaryTable} - INNER JOIN {joinTable} ON {primaryTable}.id = {joinTable}.{primaryFK} - INNER JOIN studio ON {primaryTable}.studio_id = studio.item_id` - - var unions []string - for _, c := range formatMaps { - unions = append(unions, utils.StrFormat(templStr, c)) - } - - f.addWith(fmt.Sprintf("%s AS (%s)", derivedPerformerStudioTable, strings.Join(unions, " UNION "))) - - f.addLeftJoin(derivedPerformerStudioTable, "", fmt.Sprintf("performers.id = %s.performer_id", derivedPerformerStudioTable)) - f.addWhere(fmt.Sprintf("%s.performer_id IS %s NULL", derivedPerformerStudioTable, clauseCondition)) - } - } -} - -func performerAppearsWithCriterionHandler(qb *PerformerStore, performers *models.MultiCriterionInput) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if performers != nil { - formatMaps := []utils.StrFormatMap{ - { - "primaryTable": performersScenesTable, - "joinTable": performersScenesTable, - "primaryFK": sceneIDColumn, - }, - { - "primaryTable": performersImagesTable, - "joinTable": performersImagesTable, - "primaryFK": imageIDColumn, - }, - { - "primaryTable": performersGalleriesTable, - "joinTable": performersGalleriesTable, - "primaryFK": galleryIDColumn, - }, - } - - if len(performers.Value) == '0' { - return - } - - const derivedPerformerPerformersTable = "performer_performers" - - valuesClause := strings.Join(performers.Value, "),(") - - f.addWith("performer(id) AS (VALUES(" + valuesClause + "))") - - templStr := `SELECT {primaryTable}2.performer_id FROM {primaryTable} - INNER JOIN {primaryTable} AS {primaryTable}2 ON {primaryTable}.{primaryFK} = {primaryTable}2.{primaryFK} - INNER JOIN performer ON {primaryTable}.performer_id = performer.id - WHERE {primaryTable}2.performer_id != performer.id` - - if performers.Modifier == models.CriterionModifierIncludesAll && len(performers.Value) > 1 { - templStr += ` - GROUP BY 
{primaryTable}2.performer_id - HAVING(count(distinct {primaryTable}.performer_id) IS ` + strconv.Itoa(len(performers.Value)) + `)` - } - - var unions []string - for _, c := range formatMaps { - unions = append(unions, utils.StrFormat(templStr, c)) - } - - f.addWith(fmt.Sprintf("%s AS (%s)", derivedPerformerPerformersTable, strings.Join(unions, " UNION "))) - - f.addInnerJoin(derivedPerformerPerformersTable, "", fmt.Sprintf("performers.id = %s.performer_id", derivedPerformerPerformersTable)) - } - } -} - -func (qb *PerformerStore) sortByOCounter(direction string) string { - // need to sum the o_counter from scenes and images - return " ORDER BY (" + selectPerformerOCountSQL + ") " + direction -} - -func (qb *PerformerStore) sortByPlayCount(direction string) string { - // need to sum the o_counter from scenes and images - return " ORDER BY (" + selectPerformerPlayCountSQL + ") " + direction -} - -func (qb *PerformerStore) sortByLastOAt(direction string) string { - // need to get the o_dates from scenes - return " ORDER BY (" + selectPerformerLastOAtSQL + ") " + direction -} - func (qb *PerformerStore) sortByLastPlayedAt(direction string) string { // need to get the view_dates from scenes return " ORDER BY (" + selectPerformerLastPlayedAtSQL + ") " + direction @@ -1124,6 +710,7 @@ func (qb *PerformerStore) sortByLastPlayedAt(direction string) string { var performerSortOptions = sortOptions{ "birthdate", + "career_length", "created_at", "galleries_count", "height", @@ -1131,6 +718,7 @@ var performerSortOptions = sortOptions{ "images_count", "last_o_at", "last_played_at", + "measurements", "name", "o_counter", "penis_length", @@ -1140,6 +728,7 @@ var performerSortOptions = sortOptions{ "scenes_count", "tag_count", "updated_at", + "weight", } func (qb *PerformerStore) getPerformerSort(findFilter *models.FindFilterType) (string, error) { @@ -1185,21 +774,8 @@ func (qb *PerformerStore) getPerformerSort(findFilter *models.FindFilterType) (s return sortQuery, nil } -func (qb 
*PerformerStore) tagsRepository() *joinRepository { - return &joinRepository{ - repository: repository{ - tx: qb.tx, - tableName: performersTagsTable, - idColumn: performerIDColumn, - }, - fkColumn: tagIDColumn, - foreignTable: tagTable, - orderBy: "tags.name ASC", - } -} - func (qb *PerformerStore) GetTagIDs(ctx context.Context, id int) ([]int, error) { - return qb.tagsRepository().getIDs(ctx, id) + return performerRepository.tags.getIDs(ctx, id) } func (qb *PerformerStore) GetImage(ctx context.Context, performerID int) ([]byte, error) { @@ -1218,20 +794,14 @@ func (qb *PerformerStore) destroyImage(ctx context.Context, performerID int) err return qb.blobJoinQueryBuilder.DestroyImage(ctx, performerID, performerImageBlobColumn) } -func (qb *PerformerStore) stashIDRepository() *stashIDRepository { - return &stashIDRepository{ - repository{ - tx: qb.tx, - tableName: "performer_stash_ids", - idColumn: performerIDColumn, - }, - } -} - func (qb *PerformerStore) GetAliases(ctx context.Context, performerID int) ([]string, error) { return performersAliasesTableMgr.get(ctx, performerID) } +func (qb *PerformerStore) GetURLs(ctx context.Context, performerID int) ([]string, error) { + return performersURLsTableMgr.get(ctx, performerID) +} + func (qb *PerformerStore) GetStashIDs(ctx context.Context, performerID int) ([]models.StashID, error) { return performersStashIDsTableMgr.get(ctx, performerID) } diff --git a/pkg/sqlite/performer_filter.go b/pkg/sqlite/performer_filter.go new file mode 100644 index 00000000000..72990a7febd --- /dev/null +++ b/pkg/sqlite/performer_filter.go @@ -0,0 +1,535 @@ +package sqlite + +import ( + "context" + "fmt" + "strconv" + "strings" + + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/utils" +) + +type performerFilterHandler struct { + performerFilter *models.PerformerFilterType +} + +func (qb *performerFilterHandler) validate() error { + filter := qb.performerFilter + if filter == nil { + return nil + } + + if err := 
validateFilterCombination(filter.OperatorFilter); err != nil { + return err + } + + if subFilter := filter.SubFilter(); subFilter != nil { + sqb := &performerFilterHandler{performerFilter: subFilter} + if err := sqb.validate(); err != nil { + return err + } + } + + // if legacy height filter used, ensure only supported modifiers are used + if filter.Height != nil { + // treat as an int filter + intCrit := &models.IntCriterionInput{ + Modifier: filter.Height.Modifier, + } + if !intCrit.ValidModifier() { + return fmt.Errorf("invalid height modifier: %s", filter.Height.Modifier) + } + + // ensure value is a valid number + if _, err := strconv.Atoi(filter.Height.Value); err != nil { + return fmt.Errorf("invalid height value: %s", filter.Height.Value) + } + } + + return nil +} + +func (qb *performerFilterHandler) handle(ctx context.Context, f *filterBuilder) { + filter := qb.performerFilter + if filter == nil { + return + } + + if err := qb.validate(); err != nil { + f.setError(err) + return + } + + sf := filter.SubFilter() + if sf != nil { + sub := &performerFilterHandler{sf} + handleSubFilter(ctx, sub, f, filter.OperatorFilter) + } + + f.handleCriterion(ctx, qb.criterionHandler()) +} + +func (qb *performerFilterHandler) criterionHandler() criterionHandler { + filter := qb.performerFilter + const tableName = performerTable + heightCmCrit := filter.HeightCm + + return compoundHandler{ + stringCriterionHandler(filter.Name, tableName+".name"), + stringCriterionHandler(filter.Disambiguation, tableName+".disambiguation"), + stringCriterionHandler(filter.Details, tableName+".details"), + + boolCriterionHandler(filter.FilterFavorites, tableName+".favorite", nil), + boolCriterionHandler(filter.IgnoreAutoTag, tableName+".ignore_auto_tag", nil), + + yearFilterCriterionHandler(filter.BirthYear, tableName+".birthdate"), + yearFilterCriterionHandler(filter.DeathYear, tableName+".death_date"), + + qb.performerAgeFilterCriterionHandler(filter.Age), + + criterionHandlerFunc(func(ctx 
context.Context, f *filterBuilder) { + if gender := filter.Gender; gender != nil { + genderCopy := *gender + if genderCopy.Value.IsValid() && len(genderCopy.ValueList) == 0 { + genderCopy.ValueList = []models.GenderEnum{genderCopy.Value} + } + + v := utils.StringerSliceToStringSlice(genderCopy.ValueList) + enumCriterionHandler(genderCopy.Modifier, v, tableName+".gender")(ctx, f) + } + }), + + qb.performerIsMissingCriterionHandler(filter.IsMissing), + stringCriterionHandler(filter.Ethnicity, tableName+".ethnicity"), + stringCriterionHandler(filter.Country, tableName+".country"), + stringCriterionHandler(filter.EyeColor, tableName+".eye_color"), + + // special handler for legacy height filter + criterionHandlerFunc(func(ctx context.Context, f *filterBuilder) { + if heightCmCrit == nil && filter.Height != nil { + heightCm, _ := strconv.Atoi(filter.Height.Value) // already validated + heightCmCrit = &models.IntCriterionInput{ + Value: heightCm, + Modifier: filter.Height.Modifier, + } + } + + intCriterionHandler(heightCmCrit, tableName+".height", nil)(ctx, f) + }), + + stringCriterionHandler(filter.Measurements, tableName+".measurements"), + stringCriterionHandler(filter.FakeTits, tableName+".fake_tits"), + floatCriterionHandler(filter.PenisLength, tableName+".penis_length", nil), + + criterionHandlerFunc(func(ctx context.Context, f *filterBuilder) { + if circumcised := filter.Circumcised; circumcised != nil { + v := utils.StringerSliceToStringSlice(circumcised.Value) + enumCriterionHandler(circumcised.Modifier, v, tableName+".circumcised")(ctx, f) + } + }), + + stringCriterionHandler(filter.CareerLength, tableName+".career_length"), + stringCriterionHandler(filter.Tattoos, tableName+".tattoos"), + stringCriterionHandler(filter.Piercings, tableName+".piercings"), + intCriterionHandler(filter.Rating100, tableName+".rating", nil), + stringCriterionHandler(filter.HairColor, tableName+".hair_color"), + qb.urlsCriterionHandler(filter.URL), + intCriterionHandler(filter.Weight, 
tableName+".weight", nil), + criterionHandlerFunc(func(ctx context.Context, f *filterBuilder) { + if filter.StashID != nil { + performerRepository.stashIDs.join(f, "performer_stash_ids", "performers.id") + stringCriterionHandler(filter.StashID, "performer_stash_ids.stash_id")(ctx, f) + } + }), + &stashIDCriterionHandler{ + c: filter.StashIDEndpoint, + stashIDRepository: &performerRepository.stashIDs, + stashIDTableAs: "performer_stash_ids", + parentIDCol: "performers.id", + }, + + qb.aliasCriterionHandler(filter.Aliases), + + qb.tagsCriterionHandler(filter.Tags), + + qb.studiosCriterionHandler(filter.Studios), + + qb.appearsWithCriterionHandler(filter.Performers), + + qb.tagCountCriterionHandler(filter.TagCount), + qb.sceneCountCriterionHandler(filter.SceneCount), + qb.imageCountCriterionHandler(filter.ImageCount), + qb.galleryCountCriterionHandler(filter.GalleryCount), + qb.playCounterCriterionHandler(filter.PlayCount), + qb.oCounterCriterionHandler(filter.OCounter), + &dateCriterionHandler{filter.Birthdate, tableName + ".birthdate", nil}, + &dateCriterionHandler{filter.DeathDate, tableName + ".death_date", nil}, + ×tampCriterionHandler{filter.CreatedAt, tableName + ".created_at", nil}, + ×tampCriterionHandler{filter.UpdatedAt, tableName + ".updated_at", nil}, + + &relatedFilterHandler{ + relatedIDCol: "performers_scenes.scene_id", + relatedRepo: sceneRepository.repository, + relatedHandler: &sceneFilterHandler{filter.ScenesFilter}, + joinFn: func(f *filterBuilder) { + performerRepository.scenes.innerJoin(f, "", "performers.id") + }, + }, + + &relatedFilterHandler{ + relatedIDCol: "performers_images.image_id", + relatedRepo: imageRepository.repository, + relatedHandler: &imageFilterHandler{filter.ImagesFilter}, + joinFn: func(f *filterBuilder) { + performerRepository.images.innerJoin(f, "", "performers.id") + }, + }, + + &relatedFilterHandler{ + relatedIDCol: "performers_galleries.gallery_id", + relatedRepo: galleryRepository.repository, + relatedHandler: 
&galleryFilterHandler{filter.GalleriesFilter}, + joinFn: func(f *filterBuilder) { + performerRepository.galleries.innerJoin(f, "", "performers.id") + }, + }, + + &relatedFilterHandler{ + relatedIDCol: "performer_tag.tag_id", + relatedRepo: tagRepository.repository, + relatedHandler: &tagFilterHandler{filter.TagsFilter}, + joinFn: func(f *filterBuilder) { + performerRepository.tags.innerJoin(f, "performer_tag", "performers.id") + }, + }, + } +} + +// TODO - we need to provide a whitelist of possible values +func (qb *performerFilterHandler) performerIsMissingCriterionHandler(isMissing *string) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if isMissing != nil && *isMissing != "" { + switch *isMissing { + case "url": + performersURLsTableMgr.join(f, "", "performers.id") + f.addWhere("performer_urls.url IS NULL") + case "scenes": // Deprecated: use `scene_count == 0` filter instead + f.addLeftJoin(performersScenesTable, "scenes_join", "scenes_join.performer_id = performers.id") + f.addWhere("scenes_join.scene_id IS NULL") + case "image": + f.addWhere("performers.image_blob IS NULL") + case "stash_id": + performersStashIDsTableMgr.join(f, "performer_stash_ids", "performers.id") + f.addWhere("performer_stash_ids.performer_id IS NULL") + case "aliases": + performersAliasesTableMgr.join(f, "", "performers.id") + f.addWhere("performer_aliases.alias IS NULL") + default: + f.addWhere("(performers." + *isMissing + " IS NULL OR TRIM(performers." + *isMissing + ") = '')") + } + } + } +} + +func (qb *performerFilterHandler) performerAgeFilterCriterionHandler(age *models.IntCriterionInput) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if age != nil && age.Modifier.IsValid() { + clause, args := getIntCriterionWhereClause( + "cast(IFNULL(strftime('%Y.%m%d', performers.death_date), strftime('%Y.%m%d', 'now')) - strftime('%Y.%m%d', performers.birthdate) as int)", + *age, + ) + f.addWhere(clause, args...) 
+ } + } +} + +func (qb *performerFilterHandler) urlsCriterionHandler(url *models.StringCriterionInput) criterionHandlerFunc { + h := stringListCriterionHandlerBuilder{ + primaryTable: performerTable, + primaryFK: performerIDColumn, + joinTable: performerURLsTable, + stringColumn: performerURLColumn, + addJoinTable: func(f *filterBuilder) { + performersURLsTableMgr.join(f, "", "performers.id") + }, + } + + return h.handler(url) +} + +func (qb *performerFilterHandler) aliasCriterionHandler(alias *models.StringCriterionInput) criterionHandlerFunc { + h := stringListCriterionHandlerBuilder{ + primaryTable: performerTable, + primaryFK: performerIDColumn, + joinTable: performersAliasesTable, + stringColumn: performerAliasColumn, + addJoinTable: func(f *filterBuilder) { + performersAliasesTableMgr.join(f, "", "performers.id") + }, + } + + return h.handler(alias) +} + +func (qb *performerFilterHandler) tagsCriterionHandler(tags *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { + h := joinedHierarchicalMultiCriterionHandlerBuilder{ + primaryTable: performerTable, + foreignTable: tagTable, + foreignFK: "tag_id", + + relationsTable: "tags_relations", + joinAs: "performer_tag", + joinTable: performersTagsTable, + primaryFK: performerIDColumn, + } + + return h.handler(tags) +} + +func (qb *performerFilterHandler) tagCountCriterionHandler(count *models.IntCriterionInput) criterionHandlerFunc { + h := countCriterionHandlerBuilder{ + primaryTable: performerTable, + joinTable: performersTagsTable, + primaryFK: performerIDColumn, + } + + return h.handler(count) +} + +func (qb *performerFilterHandler) sceneCountCriterionHandler(count *models.IntCriterionInput) criterionHandlerFunc { + h := countCriterionHandlerBuilder{ + primaryTable: performerTable, + joinTable: performersScenesTable, + primaryFK: performerIDColumn, + } + + return h.handler(count) +} + +func (qb *performerFilterHandler) imageCountCriterionHandler(count *models.IntCriterionInput) criterionHandlerFunc { 
+ h := countCriterionHandlerBuilder{ + primaryTable: performerTable, + joinTable: performersImagesTable, + primaryFK: performerIDColumn, + } + + return h.handler(count) +} + +func (qb *performerFilterHandler) galleryCountCriterionHandler(count *models.IntCriterionInput) criterionHandlerFunc { + h := countCriterionHandlerBuilder{ + primaryTable: performerTable, + joinTable: performersGalleriesTable, + primaryFK: performerIDColumn, + } + + return h.handler(count) +} + +// used for sorting and filtering on performer o-count +var selectPerformerOCountSQL = utils.StrFormat( + "SELECT SUM(o_counter) "+ + "FROM ("+ + "SELECT SUM(o_counter) as o_counter from {performers_images} s "+ + "LEFT JOIN {images} ON {images}.id = s.{images_id} "+ + "WHERE s.{performer_id} = {performers}.id "+ + "UNION ALL "+ + "SELECT COUNT({scenes_o_dates}.{o_date}) as o_counter from {performers_scenes} s "+ + "LEFT JOIN {scenes} ON {scenes}.id = s.{scene_id} "+ + "LEFT JOIN {scenes_o_dates} ON {scenes_o_dates}.{scene_id} = {scenes}.id "+ + "WHERE s.{performer_id} = {performers}.id "+ + ")", + map[string]interface{}{ + "performers_images": performersImagesTable, + "images": imageTable, + "performer_id": performerIDColumn, + "images_id": imageIDColumn, + "performers": performerTable, + "performers_scenes": performersScenesTable, + "scenes": sceneTable, + "scene_id": sceneIDColumn, + "scenes_o_dates": scenesODatesTable, + "o_date": sceneODateColumn, + }, +) + +// used for sorting and filtering play count on performer view count +var selectPerformerPlayCountSQL = utils.StrFormat( + "SELECT COUNT(DISTINCT {view_date}) FROM ("+ + "SELECT {view_date} FROM {performers_scenes} s "+ + "LEFT JOIN {scenes} ON {scenes}.id = s.{scene_id} "+ + "LEFT JOIN {scenes_view_dates} ON {scenes_view_dates}.{scene_id} = {scenes}.id "+ + "WHERE s.{performer_id} = {performers}.id"+ + ")", + map[string]interface{}{ + "performer_id": performerIDColumn, + "performers": performerTable, + "performers_scenes": 
performersScenesTable, + "scenes": sceneTable, + "scene_id": sceneIDColumn, + "scenes_view_dates": scenesViewDatesTable, + "view_date": sceneViewDateColumn, + }, +) + +func (qb *performerFilterHandler) oCounterCriterionHandler(count *models.IntCriterionInput) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if count == nil { + return + } + + lhs := "(" + selectPerformerOCountSQL + ")" + clause, args := getIntCriterionWhereClause(lhs, *count) + + f.addWhere(clause, args...) + } +} + +func (qb *performerFilterHandler) playCounterCriterionHandler(count *models.IntCriterionInput) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if count == nil { + return + } + + lhs := "(" + selectPerformerPlayCountSQL + ")" + clause, args := getIntCriterionWhereClause(lhs, *count) + + f.addWhere(clause, args...) + } +} + +func (qb *performerFilterHandler) studiosCriterionHandler(studios *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if studios != nil { + formatMaps := []utils.StrFormatMap{ + { + "primaryTable": sceneTable, + "joinTable": performersScenesTable, + "primaryFK": sceneIDColumn, + }, + { + "primaryTable": imageTable, + "joinTable": performersImagesTable, + "primaryFK": imageIDColumn, + }, + { + "primaryTable": galleryTable, + "joinTable": performersGalleriesTable, + "primaryFK": galleryIDColumn, + }, + } + + if studios.Modifier == models.CriterionModifierIsNull || studios.Modifier == models.CriterionModifierNotNull { + var notClause string + if studios.Modifier == models.CriterionModifierNotNull { + notClause = "NOT" + } + + var conditions []string + for _, c := range formatMaps { + f.addLeftJoin(c["joinTable"].(string), "", fmt.Sprintf("%s.performer_id = performers.id", c["joinTable"])) + f.addLeftJoin(c["primaryTable"].(string), "", fmt.Sprintf("%s.%s = %s.id", c["joinTable"], c["primaryFK"], c["primaryTable"])) + + conditions = 
append(conditions, fmt.Sprintf("%s.studio_id IS NULL", c["primaryTable"])) + } + + f.addWhere(fmt.Sprintf("%s (%s)", notClause, strings.Join(conditions, " AND "))) + return + } + + if len(studios.Value) == 0 { + return + } + + var clauseCondition string + + switch studios.Modifier { + case models.CriterionModifierIncludes: + // return performers who appear in scenes/images/galleries with any of the given studios + clauseCondition = "NOT" + case models.CriterionModifierExcludes: + // exclude performers who appear in scenes/images/galleries with any of the given studios + clauseCondition = "" + default: + return + } + + const derivedPerformerStudioTable = "performer_studio" + valuesClause, err := getHierarchicalValues(ctx, studios.Value, studioTable, "", "parent_id", "child_id", studios.Depth) + if err != nil { + f.setError(err) + return + } + f.addWith("studio(root_id, item_id) AS (" + valuesClause + ")") + + templStr := `SELECT performer_id FROM {primaryTable} + INNER JOIN {joinTable} ON {primaryTable}.id = {joinTable}.{primaryFK} + INNER JOIN studio ON {primaryTable}.studio_id = studio.item_id` + + var unions []string + for _, c := range formatMaps { + unions = append(unions, utils.StrFormat(templStr, c)) + } + + f.addWith(fmt.Sprintf("%s AS (%s)", derivedPerformerStudioTable, strings.Join(unions, " UNION "))) + + f.addLeftJoin(derivedPerformerStudioTable, "", fmt.Sprintf("performers.id = %s.performer_id", derivedPerformerStudioTable)) + f.addWhere(fmt.Sprintf("%s.performer_id IS %s NULL", derivedPerformerStudioTable, clauseCondition)) + } + } +} + +func (qb *performerFilterHandler) appearsWithCriterionHandler(performers *models.MultiCriterionInput) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if performers != nil { + formatMaps := []utils.StrFormatMap{ + { + "primaryTable": performersScenesTable, + "joinTable": performersScenesTable, + "primaryFK": sceneIDColumn, + }, + { + "primaryTable": performersImagesTable, + "joinTable": 
performersImagesTable, + "primaryFK": imageIDColumn, + }, + { + "primaryTable": performersGalleriesTable, + "joinTable": performersGalleriesTable, + "primaryFK": galleryIDColumn, + }, + } + + if len(performers.Value) == 0 { + return + } + + const derivedPerformerPerformersTable = "performer_performers" + + valuesClause := strings.Join(performers.Value, "),(") + + f.addWith("performer(id) AS (VALUES(" + valuesClause + "))") + + templStr := `SELECT {primaryTable}2.performer_id FROM {primaryTable} + INNER JOIN {primaryTable} AS {primaryTable}2 ON {primaryTable}.{primaryFK} = {primaryTable}2.{primaryFK} + INNER JOIN performer ON {primaryTable}.performer_id = performer.id + WHERE {primaryTable}2.performer_id != performer.id` + + if performers.Modifier == models.CriterionModifierIncludesAll && len(performers.Value) > 1 { + templStr += ` + GROUP BY {primaryTable}2.performer_id + HAVING(count(distinct {primaryTable}.performer_id) IS ` + strconv.Itoa(len(performers.Value)) + `)` + } + + var unions []string + for _, c := range formatMaps { + unions = append(unions, utils.StrFormat(templStr, c)) + } + + f.addWith(fmt.Sprintf("%s AS (%s)", derivedPerformerPerformersTable, strings.Join(unions, " UNION "))) + + f.addInnerJoin(derivedPerformerPerformersTable, "", fmt.Sprintf("performers.id = %s.performer_id", derivedPerformerPerformersTable)) + } + } +} diff --git a/pkg/sqlite/performer_test.go b/pkg/sqlite/performer_test.go index 8ba32964b82..e0294f3e442 100644 --- a/pkg/sqlite/performer_test.go +++ b/pkg/sqlite/performer_test.go @@ -22,6 +22,11 @@ func loadPerformerRelationships(ctx context.Context, expected models.Performer, return err } } + if expected.URLs.Loaded() { + if err := actual.LoadURLs(ctx, db.Performer); err != nil { + return err + } + } if expected.TagIDs.Loaded() { if err := actual.LoadTagIDs(ctx, db.Performer); err != nil { return err @@ -45,6 +50,7 @@ func Test_PerformerStore_Create(t *testing.T) { url = "url" twitter = "twitter" instagram = "instagram" + 
urls = []string{url, twitter, instagram} rating = 3 ethnicity = "ethnicity" country = "country" @@ -84,9 +90,7 @@ func Test_PerformerStore_Create(t *testing.T) { Name: name, Disambiguation: disambiguation, Gender: &gender, - URL: url, - Twitter: twitter, - Instagram: instagram, + URLs: models.NewRelatedStrings(urls), Birthdate: &birthdate, Ethnicity: ethnicity, Country: country, @@ -193,6 +197,7 @@ func Test_PerformerStore_Update(t *testing.T) { url = "url" twitter = "twitter" instagram = "instagram" + urls = []string{url, twitter, instagram} rating = 3 ethnicity = "ethnicity" country = "country" @@ -233,9 +238,7 @@ func Test_PerformerStore_Update(t *testing.T) { Name: name, Disambiguation: disambiguation, Gender: &gender, - URL: url, - Twitter: twitter, - Instagram: instagram, + URLs: models.NewRelatedStrings(urls), Birthdate: &birthdate, Ethnicity: ethnicity, Country: country, @@ -277,6 +280,7 @@ func Test_PerformerStore_Update(t *testing.T) { &models.Performer{ ID: performerIDs[performerIdxWithGallery], Aliases: models.NewRelatedStrings([]string{}), + URLs: models.NewRelatedStrings([]string{}), TagIDs: models.NewRelatedIDs([]int{}), StashIDs: models.NewRelatedStashIDs([]models.StashID{}), }, @@ -341,9 +345,7 @@ func clearPerformerPartial() models.PerformerPartial { return models.PerformerPartial{ Disambiguation: nullString, Gender: nullString, - URL: nullString, - Twitter: nullString, - Instagram: nullString, + URLs: &models.UpdateStrings{Mode: models.RelationshipUpdateModeSet}, Birthdate: nullDate, Ethnicity: nullString, Country: nullString, @@ -376,6 +378,7 @@ func Test_PerformerStore_UpdatePartial(t *testing.T) { url = "url" twitter = "twitter" instagram = "instagram" + urls = []string{url, twitter, instagram} rating = 3 ethnicity = "ethnicity" country = "country" @@ -418,21 +421,22 @@ func Test_PerformerStore_UpdatePartial(t *testing.T) { Name: models.NewOptionalString(name), Disambiguation: models.NewOptionalString(disambiguation), Gender: 
models.NewOptionalString(gender.String()), - URL: models.NewOptionalString(url), - Twitter: models.NewOptionalString(twitter), - Instagram: models.NewOptionalString(instagram), - Birthdate: models.NewOptionalDate(birthdate), - Ethnicity: models.NewOptionalString(ethnicity), - Country: models.NewOptionalString(country), - EyeColor: models.NewOptionalString(eyeColor), - Height: models.NewOptionalInt(height), - Measurements: models.NewOptionalString(measurements), - FakeTits: models.NewOptionalString(fakeTits), - PenisLength: models.NewOptionalFloat64(penisLength), - Circumcised: models.NewOptionalString(circumcised.String()), - CareerLength: models.NewOptionalString(careerLength), - Tattoos: models.NewOptionalString(tattoos), - Piercings: models.NewOptionalString(piercings), + URLs: &models.UpdateStrings{ + Values: urls, + Mode: models.RelationshipUpdateModeSet, + }, + Birthdate: models.NewOptionalDate(birthdate), + Ethnicity: models.NewOptionalString(ethnicity), + Country: models.NewOptionalString(country), + EyeColor: models.NewOptionalString(eyeColor), + Height: models.NewOptionalInt(height), + Measurements: models.NewOptionalString(measurements), + FakeTits: models.NewOptionalString(fakeTits), + PenisLength: models.NewOptionalFloat64(penisLength), + Circumcised: models.NewOptionalString(circumcised.String()), + CareerLength: models.NewOptionalString(careerLength), + Tattoos: models.NewOptionalString(tattoos), + Piercings: models.NewOptionalString(piercings), Aliases: &models.UpdateStrings{ Values: aliases, Mode: models.RelationshipUpdateModeSet, @@ -469,9 +473,7 @@ func Test_PerformerStore_UpdatePartial(t *testing.T) { Name: name, Disambiguation: disambiguation, Gender: &gender, - URL: url, - Twitter: twitter, - Instagram: instagram, + URLs: models.NewRelatedStrings(urls), Birthdate: &birthdate, Ethnicity: ethnicity, Country: country, @@ -516,6 +518,7 @@ func Test_PerformerStore_UpdatePartial(t *testing.T) { ID: performerIDs[performerIdxWithTwoTags], Name: 
getPerformerStringValue(performerIdxWithTwoTags, "Name"), Favorite: getPerformerBoolValue(performerIdxWithTwoTags), + URLs: models.NewRelatedStrings([]string{}), Aliases: models.NewRelatedStrings([]string{}), TagIDs: models.NewRelatedIDs([]int{}), StashIDs: models.NewRelatedStashIDs([]models.StashID{}), @@ -731,10 +734,12 @@ func TestPerformerQueryEthnicityOr(t *testing.T) { Value: performer1Eth, Modifier: models.CriterionModifierEquals, }, - Or: &models.PerformerFilterType{ - Ethnicity: &models.StringCriterionInput{ - Value: performer2Eth, - Modifier: models.CriterionModifierEquals, + OperatorFilter: models.OperatorFilter[models.PerformerFilterType]{ + Or: &models.PerformerFilterType{ + Ethnicity: &models.StringCriterionInput{ + Value: performer2Eth, + Modifier: models.CriterionModifierEquals, + }, }, }, } @@ -760,10 +765,12 @@ func TestPerformerQueryEthnicityAndRating(t *testing.T) { Value: performerEth, Modifier: models.CriterionModifierEquals, }, - And: &models.PerformerFilterType{ - Rating100: &models.IntCriterionInput{ - Value: performerRating, - Modifier: models.CriterionModifierEquals, + OperatorFilter: models.OperatorFilter[models.PerformerFilterType]{ + And: &models.PerformerFilterType{ + Rating100: &models.IntCriterionInput{ + Value: performerRating, + Modifier: models.CriterionModifierEquals, + }, }, }, } @@ -801,8 +808,10 @@ func TestPerformerQueryEthnicityNotRating(t *testing.T) { performerFilter := models.PerformerFilterType{ Ethnicity: ðCriterion, - Not: &models.PerformerFilterType{ - Rating100: &ratingCriterion, + OperatorFilter: models.OperatorFilter[models.PerformerFilterType]{ + Not: &models.PerformerFilterType{ + Rating100: &ratingCriterion, + }, }, } @@ -838,24 +847,30 @@ func TestPerformerIllegalQuery(t *testing.T) { // And and Or in the same filter "AndOr", models.PerformerFilterType{ - And: &subFilter, - Or: &subFilter, + OperatorFilter: models.OperatorFilter[models.PerformerFilterType]{ + And: &subFilter, + Or: &subFilter, + }, }, }, { // 
And and Not in the same filter "AndNot", models.PerformerFilterType{ - And: &subFilter, - Not: &subFilter, + OperatorFilter: models.OperatorFilter[models.PerformerFilterType]{ + And: &subFilter, + Not: &subFilter, + }, }, }, { // Or and Not in the same filter "OrNot", models.PerformerFilterType{ - Or: &subFilter, - Not: &subFilter, + OperatorFilter: models.OperatorFilter[models.PerformerFilterType]{ + Or: &subFilter, + Not: &subFilter, + }, }, }, { @@ -1278,7 +1293,14 @@ func TestPerformerQueryURL(t *testing.T) { verifyFn := func(g *models.Performer) { t.Helper() - verifyString(t, g.URL, urlCriterion) + + urls := g.URLs.List() + var url string + if len(urls) > 0 { + url = urls[0] + } + + verifyString(t, url, urlCriterion) } verifyPerformerQuery(t, filter, verifyFn) @@ -1306,6 +1328,12 @@ func verifyPerformerQuery(t *testing.T, filter models.PerformerFilterType, verif t.Helper() performers := queryPerformers(ctx, t, &filter, nil) + for _, performer := range performers { + if err := performer.LoadURLs(ctx, db.Performer); err != nil { + t.Errorf("Error loading url relationships: %v", err) + } + } + // assume it should find at least one assert.Greater(t, len(performers), 0) diff --git a/pkg/sqlite/query.go b/pkg/sqlite/query.go index 597ab66b98f..9c09d8beaed 100644 --- a/pkg/sqlite/query.go +++ b/pkg/sqlite/query.go @@ -110,6 +110,16 @@ func (qb *queryBuilder) addArg(args ...interface{}) { qb.args = append(qb.args, args...) 
} +func (qb *queryBuilder) hasJoin(alias string) bool { + for _, j := range qb.joins { + if j.alias() == alias { + return true + } + } + + return false +} + func (qb *queryBuilder) join(table, as, onClause string) { newJoin := join{ table: table, diff --git a/pkg/sqlite/relationships.go b/pkg/sqlite/relationships.go new file mode 100644 index 00000000000..32c8fda649c --- /dev/null +++ b/pkg/sqlite/relationships.go @@ -0,0 +1,41 @@ +package sqlite + +import ( + "context" + + "github.com/stashapp/stash/pkg/models" +) + +type idRelationshipStore struct { + joinTable *joinTable +} + +func (s *idRelationshipStore) createRelationships(ctx context.Context, id int, fkIDs models.RelatedIDs) error { + if fkIDs.Loaded() { + if err := s.joinTable.insertJoins(ctx, id, fkIDs.List()); err != nil { + return err + } + } + + return nil +} + +func (s *idRelationshipStore) modifyRelationships(ctx context.Context, id int, fkIDs *models.UpdateIDs) error { + if fkIDs != nil { + if err := s.joinTable.modifyJoins(ctx, id, fkIDs.IDs, fkIDs.Mode); err != nil { + return err + } + } + + return nil +} + +func (s *idRelationshipStore) replaceRelationships(ctx context.Context, id int, fkIDs models.RelatedIDs) error { + if fkIDs.Loaded() { + if err := s.joinTable.replaceJoins(ctx, id, fkIDs.List()); err != nil { + return err + } + } + + return nil +} diff --git a/pkg/sqlite/repository.go b/pkg/sqlite/repository.go index fe0961ff590..8eb87b9aff1 100644 --- a/pkg/sqlite/repository.go +++ b/pkg/sqlite/repository.go @@ -20,7 +20,6 @@ type objectList interface { } type repository struct { - tx dbWrapper tableName string idColumn string } @@ -48,7 +47,7 @@ func (r *repository) destroyExisting(ctx context.Context, ids []int) error { func (r *repository) destroy(ctx context.Context, ids []int) error { for _, id := range ids { stmt := fmt.Sprintf("DELETE FROM %s WHERE %s = ?", r.tableName, r.idColumn) - if _, err := r.tx.Exec(ctx, stmt, id); err != nil { + if _, err := dbWrapper.Exec(ctx, stmt, id); err != 
nil { return err } } @@ -78,7 +77,7 @@ func (r *repository) runCountQuery(ctx context.Context, query string, args []int }{0} // Perform query and fetch result - if err := r.tx.Get(ctx, &result, query, args...); err != nil && !errors.Is(err, sql.ErrNoRows) { + if err := dbWrapper.Get(ctx, &result, query, args...); err != nil && !errors.Is(err, sql.ErrNoRows) { return 0, err } @@ -90,7 +89,7 @@ func (r *repository) runIdsQuery(ctx context.Context, query string, args []inter Int int `db:"id"` } - if err := r.tx.Select(ctx, &result, query, args...); err != nil && !errors.Is(err, sql.ErrNoRows) { + if err := dbWrapper.Select(ctx, &result, query, args...); err != nil && !errors.Is(err, sql.ErrNoRows) { return []int{}, fmt.Errorf("running query: %s [%v]: %w", query, args, err) } @@ -102,7 +101,7 @@ func (r *repository) runIdsQuery(ctx context.Context, query string, args []inter } func (r *repository) queryFunc(ctx context.Context, query string, args []interface{}, single bool, f func(rows *sqlx.Rows) error) error { - rows, err := r.tx.Queryx(ctx, query, args...) + rows, err := dbWrapper.Queryx(ctx, query, args...) if err != nil && !errors.Is(err, sql.ErrNoRows) { return err @@ -150,7 +149,7 @@ func (r *repository) queryStruct(ctx context.Context, query string, args []inter } func (r *repository) querySimple(ctx context.Context, query string, args []interface{}, out interface{}) error { - rows, err := r.tx.Queryx(ctx, query, args...) + rows, err := dbWrapper.Queryx(ctx, query, args...) 
if err != nil && !errors.Is(err, sql.ErrNoRows) { return err @@ -230,7 +229,6 @@ func (r *repository) join(j joiner, as string, parentIDCol string) { j.addLeftJoin(r.tableName, as, fmt.Sprintf("%s.%s = %s", t, r.idColumn, parentIDCol)) } -//nolint:golint,unused func (r *repository) innerJoin(j joiner, as string, parentIDCol string) { t := r.tableName if as != "" { @@ -269,7 +267,7 @@ func (r *joinRepository) getIDs(ctx context.Context, id int) ([]int, error) { } func (r *joinRepository) insert(ctx context.Context, id int, foreignIDs ...int) error { - stmt, err := r.tx.Prepare(ctx, fmt.Sprintf("INSERT INTO %s (%s, %s) VALUES (?, ?)", r.tableName, r.idColumn, r.fkColumn)) + stmt, err := dbWrapper.Prepare(ctx, fmt.Sprintf("INSERT INTO %s (%s, %s) VALUES (?, ?)", r.tableName, r.idColumn, r.fkColumn)) if err != nil { return err } @@ -277,7 +275,7 @@ func (r *joinRepository) insert(ctx context.Context, id int, foreignIDs ...int) defer stmt.Close() for _, fk := range foreignIDs { - if _, err := r.tx.ExecStmt(ctx, stmt, id, fk); err != nil { + if _, err := dbWrapper.ExecStmt(ctx, stmt, id, fk); err != nil { return err } } @@ -286,7 +284,7 @@ func (r *joinRepository) insert(ctx context.Context, id int, foreignIDs ...int) // insertOrIgnore inserts a join into the table, silently failing in the event that a conflict occurs (ie when the join already exists) func (r *joinRepository) insertOrIgnore(ctx context.Context, id int, foreignIDs ...int) error { - stmt, err := r.tx.Prepare(ctx, fmt.Sprintf("INSERT INTO %s (%s, %s) VALUES (?, ?) ON CONFLICT (%[2]s, %s) DO NOTHING", r.tableName, r.idColumn, r.fkColumn)) + stmt, err := dbWrapper.Prepare(ctx, fmt.Sprintf("INSERT INTO %s (%s, %s) VALUES (?, ?) 
ON CONFLICT (%[2]s, %s) DO NOTHING", r.tableName, r.idColumn, r.fkColumn)) if err != nil { return err } @@ -294,7 +292,7 @@ func (r *joinRepository) insertOrIgnore(ctx context.Context, id int, foreignIDs defer stmt.Close() for _, fk := range foreignIDs { - if _, err := r.tx.ExecStmt(ctx, stmt, id, fk); err != nil { + if _, err := dbWrapper.ExecStmt(ctx, stmt, id, fk); err != nil { return err } } @@ -310,7 +308,7 @@ func (r *joinRepository) destroyJoins(ctx context.Context, id int, foreignIDs .. args[i+1] = v } - if _, err := r.tx.Exec(ctx, stmt, args...); err != nil { + if _, err := dbWrapper.Exec(ctx, stmt, args...); err != nil { return err } @@ -360,7 +358,7 @@ func (r *captionRepository) get(ctx context.Context, id models.FileID) ([]*model func (r *captionRepository) insert(ctx context.Context, id models.FileID, caption *models.VideoCaption) (sql.Result, error) { stmt := fmt.Sprintf("INSERT INTO %s (%s, %s, %s, %s) VALUES (?, ?, ?, ?)", r.tableName, r.idColumn, captionCodeColumn, captionFilenameColumn, captionTypeColumn) - return r.tx.Exec(ctx, stmt, id, caption.LanguageCode, caption.Filename, caption.CaptionType) + return dbWrapper.Exec(ctx, stmt, id, caption.LanguageCode, caption.Filename, caption.CaptionType) } func (r *captionRepository) replace(ctx context.Context, id models.FileID, captions []*models.VideoCaption) error { @@ -399,7 +397,7 @@ func (r *stringRepository) get(ctx context.Context, id int) ([]string, error) { func (r *stringRepository) insert(ctx context.Context, id int, s string) (sql.Result, error) { stmt := fmt.Sprintf("INSERT INTO %s (%s, %s) VALUES (?, ?)", r.tableName, r.idColumn, r.stringColumn) - return r.tx.Exec(ctx, stmt, id, s) + return dbWrapper.Exec(ctx, stmt, id, s) } func (r *stringRepository) replace(ctx context.Context, id int, newStrings []string) error { diff --git a/pkg/sqlite/saved_filter.go b/pkg/sqlite/saved_filter.go index e4369bda5c3..8f58b05e76c 100644 --- a/pkg/sqlite/saved_filter.go +++ b/pkg/sqlite/saved_filter.go @@ 
-141,23 +141,6 @@ func (qb *SavedFilterStore) Update(ctx context.Context, updatedObject *models.Sa return nil } -func (qb *SavedFilterStore) SetDefault(ctx context.Context, obj *models.SavedFilter) error { - // find the existing default - existing, err := qb.FindDefault(ctx, obj.Mode) - if err != nil { - return err - } - - obj.Name = savedFilterDefaultName - - if existing != nil { - obj.ID = existing.ID - return qb.Update(ctx, obj) - } - - return qb.Create(ctx, obj) -} - func (qb *SavedFilterStore) Destroy(ctx context.Context, id int) error { return qb.destroyExisting(ctx, []int{id}) } @@ -245,29 +228,24 @@ func (qb *SavedFilterStore) getMany(ctx context.Context, q *goqu.SelectDataset) func (qb *SavedFilterStore) FindByMode(ctx context.Context, mode models.FilterMode) ([]*models.SavedFilter, error) { // SELECT * FROM %s WHERE mode = ? AND name != ? ORDER BY name ASC table := qb.table() - sq := qb.selectDataset().Prepared(true).Where( - table.Col("mode").Eq(mode), - table.Col("name").Neq(savedFilterDefaultName), - ).Order(table.Col("name").Asc()) - ret, err := qb.getMany(ctx, sq) - if err != nil { - return nil, err + // TODO - querying on groups needs to include movies + // remove this when we migrate to remove the movies filter mode in the database + var whereClause exp.Expression + + if mode == models.FilterModeGroups || mode == models.FilterModeMovies { + whereClause = goqu.Or( + table.Col("mode").Eq(models.FilterModeGroups), + table.Col("mode").Eq(models.FilterModeMovies), + ) + } else { + whereClause = table.Col("mode").Eq(mode) } - return ret, nil -} - -func (qb *SavedFilterStore) FindDefault(ctx context.Context, mode models.FilterMode) (*models.SavedFilter, error) { - // SELECT * FROM saved_filters WHERE mode = ? AND name = ? 
- table := qb.table() - sq := qb.selectDataset().Prepared(true).Where( - table.Col("mode").Eq(mode), - table.Col("name").Eq(savedFilterDefaultName), - ) + sq := qb.selectDataset().Prepared(true).Where(whereClause).Order(table.Col("name").Asc()) + ret, err := qb.getMany(ctx, sq) - ret, err := qb.get(ctx, sq) - if err != nil && !errors.Is(err, sql.ErrNoRows) { + if err != nil { return nil, err } diff --git a/pkg/sqlite/saved_filter_test.go b/pkg/sqlite/saved_filter_test.go index aa98121fd45..60592a923df 100644 --- a/pkg/sqlite/saved_filter_test.go +++ b/pkg/sqlite/saved_filter_test.go @@ -96,66 +96,6 @@ func TestSavedFilterDestroy(t *testing.T) { }) } -func TestSavedFilterFindDefault(t *testing.T) { - withTxn(func(ctx context.Context) error { - def, err := db.SavedFilter.FindDefault(ctx, models.FilterModeScenes) - if err == nil { - assert.Equal(t, savedFilterIDs[savedFilterIdxDefaultScene], def.ID) - } - - return err - }) -} - -func TestSavedFilterSetDefault(t *testing.T) { - filterQ := "" - filterPage := 1 - filterPerPage := 40 - filterSort := "date" - filterDirection := models.SortDirectionEnumAsc - findFilter := models.FindFilterType{ - Q: &filterQ, - Page: &filterPage, - PerPage: &filterPerPage, - Sort: &filterSort, - Direction: &filterDirection, - } - objectFilter := map[string]interface{}{ - "test": "foo", - } - uiOptions := map[string]interface{}{ - "display_mode": 1, - "zoom_index": 1, - } - - withTxn(func(ctx context.Context) error { - err := db.SavedFilter.SetDefault(ctx, &models.SavedFilter{ - Mode: models.FilterModeMovies, - FindFilter: &findFilter, - ObjectFilter: objectFilter, - UIOptions: uiOptions, - }) - - return err - }) - - var defID int - withTxn(func(ctx context.Context) error { - def, err := db.SavedFilter.FindDefault(ctx, models.FilterModeMovies) - if err == nil { - defID = def.ID - assert.Equal(t, &findFilter, def.FindFilter) - } - - return err - }) - - // destroy it again - withTxn(func(ctx context.Context) error { - return 
db.SavedFilter.Destroy(ctx, defID) - }) -} - // TODO Update // TODO Destroy // TODO Find diff --git a/pkg/sqlite/scene.go b/pkg/sqlite/scene.go index 8c35d162c19..c950be4d160 100644 --- a/pkg/sqlite/scene.go +++ b/pkg/sqlite/scene.go @@ -28,7 +28,7 @@ const ( performersScenesTable = "performers_scenes" scenesTagsTable = "scenes_tags" scenesGalleriesTable = "scenes_galleries" - moviesScenesTable = "movies_scenes" + groupsScenesTable = "groups_scenes" scenesURLsTable = "scene_urls" sceneURLColumn = "url" scenesViewDatesTable = "scenes_view_dates" @@ -168,23 +168,78 @@ func (r *sceneRowRecord) fromPartial(o models.ScenePartial) { r.setFloat64("play_duration", o.PlayDuration) } -type SceneStore struct { +type sceneRepositoryType struct { repository + galleries joinRepository + tags joinRepository + performers joinRepository + groups repository + + files filesRepository + + stashIDs stashIDRepository +} + +var ( + sceneRepository = sceneRepositoryType{ + repository: repository{ + tableName: sceneTable, + idColumn: idColumn, + }, + galleries: joinRepository{ + repository: repository{ + tableName: scenesGalleriesTable, + idColumn: sceneIDColumn, + }, + fkColumn: galleryIDColumn, + }, + tags: joinRepository{ + repository: repository{ + tableName: scenesTagsTable, + idColumn: sceneIDColumn, + }, + fkColumn: tagIDColumn, + foreignTable: tagTable, + orderBy: "tags.name ASC", + }, + performers: joinRepository{ + repository: repository{ + tableName: performersScenesTable, + idColumn: sceneIDColumn, + }, + fkColumn: performerIDColumn, + }, + groups: repository{ + tableName: groupsScenesTable, + idColumn: sceneIDColumn, + }, + files: filesRepository{ + repository: repository{ + tableName: scenesFilesTable, + idColumn: sceneIDColumn, + }, + }, + stashIDs: stashIDRepository{ + repository{ + tableName: "scene_stash_ids", + idColumn: sceneIDColumn, + }, + }, + } +) + +type SceneStore struct { blobJoinQueryBuilder tableMgr *table oDateManager viewDateManager - fileStore *FileStore + 
repo *storeRepository } -func NewSceneStore(fileStore *FileStore, blobStore *BlobStore) *SceneStore { +func NewSceneStore(r *storeRepository, blobStore *BlobStore) *SceneStore { return &SceneStore{ - repository: repository{ - tableName: sceneTable, - idColumn: idColumn, - }, blobJoinQueryBuilder: blobJoinQueryBuilder{ blobStore: blobStore, joinTable: sceneTable, @@ -193,7 +248,7 @@ func NewSceneStore(fileStore *FileStore, blobStore *BlobStore) *SceneStore { tableMgr: sceneTableMgr, viewDateManager: viewDateManager{scenesViewTableMgr}, oDateManager: oDateManager{scenesOTableMgr}, - fileStore: fileStore, + repo: r, } } @@ -288,8 +343,8 @@ func (qb *SceneStore) Create(ctx context.Context, newObject *models.Scene, fileI } } - if newObject.Movies.Loaded() { - if err := scenesMoviesTableMgr.insertJoins(ctx, id, newObject.Movies.List()); err != nil { + if newObject.Groups.Loaded() { + if err := scenesGroupsTableMgr.insertJoins(ctx, id, newObject.Groups.List()); err != nil { return err } } @@ -344,8 +399,8 @@ func (qb *SceneStore) UpdatePartial(ctx context.Context, id int, partial models. 
return nil, err } } - if partial.MovieIDs != nil { - if err := scenesMoviesTableMgr.modifyJoins(ctx, id, partial.MovieIDs.Movies, partial.MovieIDs.Mode); err != nil { + if partial.GroupIDs != nil { + if err := scenesGroupsTableMgr.modifyJoins(ctx, id, partial.GroupIDs.Groups, partial.GroupIDs.Mode); err != nil { return nil, err } } @@ -396,8 +451,8 @@ func (qb *SceneStore) Update(ctx context.Context, updatedObject *models.Scene) e } } - if updatedObject.Movies.Loaded() { - if err := scenesMoviesTableMgr.replaceJoins(ctx, updatedObject.ID, updatedObject.Movies.List()); err != nil { + if updatedObject.Groups.Loaded() { + if err := scenesGroupsTableMgr.replaceJoins(ctx, updatedObject.ID, updatedObject.Groups.List()); err != nil { return err } } @@ -531,13 +586,13 @@ func (qb *SceneStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]*mo } func (qb *SceneStore) GetFiles(ctx context.Context, id int) ([]*models.VideoFile, error) { - fileIDs, err := qb.filesRepository().get(ctx, id) + fileIDs, err := sceneRepository.files.get(ctx, id) if err != nil { return nil, err } // use fileStore to load files - files, err := qb.fileStore.Find(ctx, fileIDs...) + files, err := qb.repo.File.Find(ctx, fileIDs...) 
if err != nil { return nil, err } @@ -556,7 +611,7 @@ func (qb *SceneStore) GetFiles(ctx context.Context, id int) ([]*models.VideoFile func (qb *SceneStore) GetManyFileIDs(ctx context.Context, ids []int) ([][]models.FileID, error) { const primaryOnly = false - return qb.filesRepository().getMany(ctx, ids, primaryOnly) + return sceneRepository.files.getMany(ctx, ids, primaryOnly) } func (qb *SceneStore) FindByFileID(ctx context.Context, fileID models.FileID) ([]*models.Scene, error) { @@ -723,26 +778,19 @@ func (qb *SceneStore) OCountByPerformerID(ctx context.Context, performerID int) return ret, nil } -func (qb *SceneStore) FindByMovieID(ctx context.Context, movieID int) ([]*models.Scene, error) { - sq := dialect.From(scenesMoviesJoinTable).Select(scenesMoviesJoinTable.Col(sceneIDColumn)).Where( - scenesMoviesJoinTable.Col(movieIDColumn).Eq(movieID), +func (qb *SceneStore) FindByGroupID(ctx context.Context, groupID int) ([]*models.Scene, error) { + sq := dialect.From(scenesGroupsJoinTable).Select(scenesGroupsJoinTable.Col(sceneIDColumn)).Where( + scenesGroupsJoinTable.Col(groupIDColumn).Eq(groupID), ) ret, err := qb.findBySubquery(ctx, sq) if err != nil { - return nil, fmt.Errorf("getting scenes for movie %d: %w", movieID, err) + return nil, fmt.Errorf("getting scenes for group %d: %w", groupID, err) } return ret, nil } -func (qb *SceneStore) CountByMovieID(ctx context.Context, movieID int) (int, error) { - joinTable := scenesMoviesJoinTable - - q := dialect.Select(goqu.COUNT("*")).From(joinTable).Where(joinTable.Col(movieIDColumn).Eq(movieID)) - return count(ctx, q) -} - func (qb *SceneStore) Count(ctx context.Context) (int, error) { q := dialect.Select(goqu.COUNT("*")).From(qb.table()) return count(ctx, q) @@ -803,6 +851,7 @@ func (qb *SceneStore) PlayDuration(ctx context.Context) (float64, error) { return ret, nil } +// TODO - currently only used by unit test func (qb *SceneStore) CountByStudioID(ctx context.Context, studioID int) (int, error) { table := 
qb.table() @@ -810,13 +859,6 @@ func (qb *SceneStore) CountByStudioID(ctx context.Context, studioID int) (int, e return count(ctx, q) } -func (qb *SceneStore) CountByTagID(ctx context.Context, tagID int) (int, error) { - joinTable := scenesTagsJoinTable - - q := dialect.Select(goqu.COUNT("*")).From(joinTable).Where(joinTable.Col(tagIDColumn).Eq(tagID)) - return count(ctx, q) -} - func (qb *SceneStore) countMissingFingerprints(ctx context.Context, fpType string) (int, error) { fpTable := fingerprintTableMgr.table.As("fingerprints_temp") @@ -864,176 +906,6 @@ func (qb *SceneStore) All(ctx context.Context) ([]*models.Scene, error) { )) } -func illegalFilterCombination(type1, type2 string) error { - return fmt.Errorf("cannot have %s and %s in the same filter", type1, type2) -} - -func (qb *SceneStore) validateFilter(sceneFilter *models.SceneFilterType) error { - const and = "AND" - const or = "OR" - const not = "NOT" - - if sceneFilter.And != nil { - if sceneFilter.Or != nil { - return illegalFilterCombination(and, or) - } - if sceneFilter.Not != nil { - return illegalFilterCombination(and, not) - } - - return qb.validateFilter(sceneFilter.And) - } - - if sceneFilter.Or != nil { - if sceneFilter.Not != nil { - return illegalFilterCombination(or, not) - } - - return qb.validateFilter(sceneFilter.Or) - } - - if sceneFilter.Not != nil { - return qb.validateFilter(sceneFilter.Not) - } - - return nil -} - -func (qb *SceneStore) makeFilter(ctx context.Context, sceneFilter *models.SceneFilterType) *filterBuilder { - query := &filterBuilder{} - - if sceneFilter.And != nil { - query.and(qb.makeFilter(ctx, sceneFilter.And)) - } - if sceneFilter.Or != nil { - query.or(qb.makeFilter(ctx, sceneFilter.Or)) - } - if sceneFilter.Not != nil { - query.not(qb.makeFilter(ctx, sceneFilter.Not)) - } - - query.handleCriterion(ctx, intCriterionHandler(sceneFilter.ID, "scenes.id", nil)) - query.handleCriterion(ctx, pathCriterionHandler(sceneFilter.Path, "folders.path", "files.basename", 
qb.addFoldersTable)) - query.handleCriterion(ctx, sceneFileCountCriterionHandler(qb, sceneFilter.FileCount)) - query.handleCriterion(ctx, stringCriterionHandler(sceneFilter.Title, "scenes.title")) - query.handleCriterion(ctx, stringCriterionHandler(sceneFilter.Code, "scenes.code")) - query.handleCriterion(ctx, stringCriterionHandler(sceneFilter.Details, "scenes.details")) - query.handleCriterion(ctx, stringCriterionHandler(sceneFilter.Director, "scenes.director")) - query.handleCriterion(ctx, criterionHandlerFunc(func(ctx context.Context, f *filterBuilder) { - if sceneFilter.Oshash != nil { - qb.addSceneFilesTable(f) - f.addLeftJoin(fingerprintTable, "fingerprints_oshash", "scenes_files.file_id = fingerprints_oshash.file_id AND fingerprints_oshash.type = 'oshash'") - } - - stringCriterionHandler(sceneFilter.Oshash, "fingerprints_oshash.fingerprint")(ctx, f) - })) - - query.handleCriterion(ctx, criterionHandlerFunc(func(ctx context.Context, f *filterBuilder) { - if sceneFilter.Checksum != nil { - qb.addSceneFilesTable(f) - f.addLeftJoin(fingerprintTable, "fingerprints_md5", "scenes_files.file_id = fingerprints_md5.file_id AND fingerprints_md5.type = 'md5'") - } - - stringCriterionHandler(sceneFilter.Checksum, "fingerprints_md5.fingerprint")(ctx, f) - })) - - query.handleCriterion(ctx, criterionHandlerFunc(func(ctx context.Context, f *filterBuilder) { - if sceneFilter.Phash != nil { - // backwards compatibility - scenePhashDistanceCriterionHandler(qb, &models.PhashDistanceCriterionInput{ - Value: sceneFilter.Phash.Value, - Modifier: sceneFilter.Phash.Modifier, - })(ctx, f) - } - })) - - query.handleCriterion(ctx, scenePhashDistanceCriterionHandler(qb, sceneFilter.PhashDistance)) - - query.handleCriterion(ctx, intCriterionHandler(sceneFilter.Rating100, "scenes.rating", nil)) - query.handleCriterion(ctx, sceneOCountCriterionHandler(sceneFilter.OCounter)) - query.handleCriterion(ctx, boolCriterionHandler(sceneFilter.Organized, "scenes.organized", nil)) - - 
query.handleCriterion(ctx, floatIntCriterionHandler(sceneFilter.Duration, "video_files.duration", qb.addVideoFilesTable)) - query.handleCriterion(ctx, resolutionCriterionHandler(sceneFilter.Resolution, "video_files.height", "video_files.width", qb.addVideoFilesTable)) - query.handleCriterion(ctx, orientationCriterionHandler(sceneFilter.Orientation, "video_files.height", "video_files.width", qb.addVideoFilesTable)) - query.handleCriterion(ctx, floatIntCriterionHandler(sceneFilter.Framerate, "ROUND(video_files.frame_rate)", qb.addVideoFilesTable)) - query.handleCriterion(ctx, intCriterionHandler(sceneFilter.Bitrate, "video_files.bit_rate", qb.addVideoFilesTable)) - query.handleCriterion(ctx, codecCriterionHandler(sceneFilter.VideoCodec, "video_files.video_codec", qb.addVideoFilesTable)) - query.handleCriterion(ctx, codecCriterionHandler(sceneFilter.AudioCodec, "video_files.audio_codec", qb.addVideoFilesTable)) - - query.handleCriterion(ctx, hasMarkersCriterionHandler(sceneFilter.HasMarkers)) - query.handleCriterion(ctx, sceneIsMissingCriterionHandler(qb, sceneFilter.IsMissing)) - query.handleCriterion(ctx, sceneURLsCriterionHandler(sceneFilter.URL)) - - query.handleCriterion(ctx, criterionHandlerFunc(func(ctx context.Context, f *filterBuilder) { - if sceneFilter.StashID != nil { - qb.stashIDRepository().join(f, "scene_stash_ids", "scenes.id") - stringCriterionHandler(sceneFilter.StashID, "scene_stash_ids.stash_id")(ctx, f) - } - })) - query.handleCriterion(ctx, &stashIDCriterionHandler{ - c: sceneFilter.StashIDEndpoint, - stashIDRepository: qb.stashIDRepository(), - stashIDTableAs: "scene_stash_ids", - parentIDCol: "scenes.id", - }) - - query.handleCriterion(ctx, boolCriterionHandler(sceneFilter.Interactive, "video_files.interactive", qb.addVideoFilesTable)) - query.handleCriterion(ctx, intCriterionHandler(sceneFilter.InteractiveSpeed, "video_files.interactive_speed", qb.addVideoFilesTable)) - - query.handleCriterion(ctx, sceneCaptionCriterionHandler(qb, 
sceneFilter.Captions)) - - query.handleCriterion(ctx, floatIntCriterionHandler(sceneFilter.ResumeTime, "scenes.resume_time", nil)) - query.handleCriterion(ctx, floatIntCriterionHandler(sceneFilter.PlayDuration, "scenes.play_duration", nil)) - query.handleCriterion(ctx, scenePlayCountCriterionHandler(sceneFilter.PlayCount)) - query.handleCriterion(ctx, criterionHandlerFunc(func(ctx context.Context, f *filterBuilder) { - if sceneFilter.LastPlayedAt != nil { - f.addLeftJoin( - fmt.Sprintf("(SELECT %s, MAX(%s) as last_played_at FROM %s GROUP BY %s)", sceneIDColumn, sceneViewDateColumn, scenesViewDatesTable, sceneIDColumn), - "scene_last_view", - fmt.Sprintf("scene_last_view.%s = scenes.id", sceneIDColumn), - ) - timestampCriterionHandler(sceneFilter.LastPlayedAt, "IFNULL(last_played_at, datetime(0))")(ctx, f) - } - })) - - query.handleCriterion(ctx, sceneTagsCriterionHandler(qb, sceneFilter.Tags)) - query.handleCriterion(ctx, sceneTagCountCriterionHandler(qb, sceneFilter.TagCount)) - query.handleCriterion(ctx, scenePerformersCriterionHandler(qb, sceneFilter.Performers)) - query.handleCriterion(ctx, scenePerformerCountCriterionHandler(qb, sceneFilter.PerformerCount)) - query.handleCriterion(ctx, studioCriterionHandler(sceneTable, sceneFilter.Studios)) - query.handleCriterion(ctx, sceneMoviesCriterionHandler(qb, sceneFilter.Movies)) - query.handleCriterion(ctx, sceneGalleriesCriterionHandler(qb, sceneFilter.Galleries)) - query.handleCriterion(ctx, scenePerformerTagsCriterionHandler(qb, sceneFilter.PerformerTags)) - query.handleCriterion(ctx, scenePerformerFavoriteCriterionHandler(sceneFilter.PerformerFavorite)) - query.handleCriterion(ctx, scenePerformerAgeCriterionHandler(sceneFilter.PerformerAge)) - query.handleCriterion(ctx, scenePhashDuplicatedCriterionHandler(sceneFilter.Duplicated, qb.addSceneFilesTable)) - query.handleCriterion(ctx, dateCriterionHandler(sceneFilter.Date, "scenes.date")) - query.handleCriterion(ctx, timestampCriterionHandler(sceneFilter.CreatedAt, 
"scenes.created_at")) - query.handleCriterion(ctx, timestampCriterionHandler(sceneFilter.UpdatedAt, "scenes.updated_at")) - - return query -} - -func (qb *SceneStore) addSceneFilesTable(f *filterBuilder) { - f.addLeftJoin(scenesFilesTable, "", "scenes_files.scene_id = scenes.id") -} - -func (qb *SceneStore) addFilesTable(f *filterBuilder) { - qb.addSceneFilesTable(f) - f.addLeftJoin(fileTable, "", "scenes_files.file_id = files.id") -} - -func (qb *SceneStore) addFoldersTable(f *filterBuilder) { - qb.addFilesTable(f) - f.addLeftJoin(folderTable, "", "files.parent_folder_id = folders.id") -} - -func (qb *SceneStore) addVideoFilesTable(f *filterBuilder) { - qb.addSceneFilesTable(f) - f.addLeftJoin(videoFileTable, "", "video_files.file_id = scenes_files.file_id") -} - func (qb *SceneStore) makeQuery(ctx context.Context, sceneFilter *models.SceneFilterType, findFilter *models.FindFilterType) (*queryBuilder, error) { if sceneFilter == nil { sceneFilter = &models.SceneFilterType{} @@ -1042,7 +914,7 @@ func (qb *SceneStore) makeQuery(ctx context.Context, sceneFilter *models.SceneFi findFilter = &models.FindFilterType{} } - query := qb.newQuery() + query := sceneRepository.newQuery() distinctIDs(&query, sceneTable) if q := findFilter.Q; q != nil && *q != "" { @@ -1074,10 +946,9 @@ func (qb *SceneStore) makeQuery(ctx context.Context, sceneFilter *models.SceneFi query.parseQueryString(searchColumns, *q) } - if err := qb.validateFilter(sceneFilter); err != nil { - return nil, err - } - filter := qb.makeFilter(ctx, sceneFilter) + filter := filterBuilderFromHandler(ctx, &sceneFilterHandler{ + sceneFilter: sceneFilter, + }) if err := query.addFilter(filter); err != nil { return nil, err @@ -1117,7 +988,7 @@ func (qb *SceneStore) queryGroupedFields(ctx context.Context, options models.Sce return models.NewSceneQueryResult(qb), nil } - aggregateQuery := qb.newQuery() + aggregateQuery := sceneRepository.newQuery() if options.Count { aggregateQuery.addColumn("COUNT(DISTINCT temp.id) 
as total") @@ -1161,7 +1032,7 @@ func (qb *SceneStore) queryGroupedFields(ctx context.Context, options models.Sce Duration null.Float Size null.Float }{} - if err := qb.repository.queryStruct(ctx, aggregateQuery.toSQL(includeSortPagination), query.args, &out); err != nil { + if err := sceneRepository.queryStruct(ctx, aggregateQuery.toSQL(includeSortPagination), query.args, &out); err != nil { return nil, err } @@ -1181,349 +1052,6 @@ func (qb *SceneStore) QueryCount(ctx context.Context, sceneFilter *models.SceneF return query.executeCount(ctx) } -func scenePlayCountCriterionHandler(count *models.IntCriterionInput) criterionHandlerFunc { - h := countCriterionHandlerBuilder{ - primaryTable: sceneTable, - joinTable: scenesViewDatesTable, - primaryFK: sceneIDColumn, - } - - return h.handler(count) -} - -func sceneOCountCriterionHandler(count *models.IntCriterionInput) criterionHandlerFunc { - h := countCriterionHandlerBuilder{ - primaryTable: sceneTable, - joinTable: scenesODatesTable, - primaryFK: sceneIDColumn, - } - - return h.handler(count) -} - -func sceneFileCountCriterionHandler(qb *SceneStore, fileCount *models.IntCriterionInput) criterionHandlerFunc { - h := countCriterionHandlerBuilder{ - primaryTable: sceneTable, - joinTable: scenesFilesTable, - primaryFK: sceneIDColumn, - } - - return h.handler(fileCount) -} - -func scenePhashDuplicatedCriterionHandler(duplicatedFilter *models.PHashDuplicationCriterionInput, addJoinFn func(f *filterBuilder)) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - // TODO: Wishlist item: Implement Distance matching - if duplicatedFilter != nil { - if addJoinFn != nil { - addJoinFn(f) - } - - var v string - if *duplicatedFilter.Duplicated { - v = ">" - } else { - v = "=" - } - - f.addInnerJoin("(SELECT file_id FROM files_fingerprints INNER JOIN (SELECT fingerprint FROM files_fingerprints WHERE type = 'phash' GROUP BY fingerprint HAVING COUNT (fingerprint) "+v+" 1) dupes on 
files_fingerprints.fingerprint = dupes.fingerprint)", "scph", "scenes_files.file_id = scph.file_id") - } - } -} - -func floatIntCriterionHandler(durationFilter *models.IntCriterionInput, column string, addJoinFn func(f *filterBuilder)) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if durationFilter != nil { - if addJoinFn != nil { - addJoinFn(f) - } - clause, args := getIntCriterionWhereClause("cast("+column+" as int)", *durationFilter) - f.addWhere(clause, args...) - } - } -} - -func resolutionCriterionHandler(resolution *models.ResolutionCriterionInput, heightColumn string, widthColumn string, addJoinFn func(f *filterBuilder)) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if resolution != nil && resolution.Value.IsValid() { - if addJoinFn != nil { - addJoinFn(f) - } - - min := resolution.Value.GetMinResolution() - max := resolution.Value.GetMaxResolution() - - widthHeight := fmt.Sprintf("MIN(%s, %s)", widthColumn, heightColumn) - - switch resolution.Modifier { - case models.CriterionModifierEquals: - f.addWhere(fmt.Sprintf("%s BETWEEN %d AND %d", widthHeight, min, max)) - case models.CriterionModifierNotEquals: - f.addWhere(fmt.Sprintf("%s NOT BETWEEN %d AND %d", widthHeight, min, max)) - case models.CriterionModifierLessThan: - f.addWhere(fmt.Sprintf("%s < %d", widthHeight, min)) - case models.CriterionModifierGreaterThan: - f.addWhere(fmt.Sprintf("%s > %d", widthHeight, max)) - } - } - } -} - -func codecCriterionHandler(codec *models.StringCriterionInput, codecColumn string, addJoinFn func(f *filterBuilder)) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if codec != nil { - if addJoinFn != nil { - addJoinFn(f) - } - - stringCriterionHandler(codec, codecColumn)(ctx, f) - } - } -} - -func hasMarkersCriterionHandler(hasMarkers *string) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if hasMarkers != nil { - 
f.addLeftJoin("scene_markers", "", "scene_markers.scene_id = scenes.id") - if *hasMarkers == "true" { - f.addHaving("count(scene_markers.scene_id) > 0") - } else { - f.addWhere("scene_markers.id IS NULL") - } - } - } -} - -func sceneIsMissingCriterionHandler(qb *SceneStore, isMissing *string) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if isMissing != nil && *isMissing != "" { - switch *isMissing { - case "url": - scenesURLsTableMgr.join(f, "", "scenes.id") - f.addWhere("scene_urls.url IS NULL") - case "galleries": - qb.galleriesRepository().join(f, "galleries_join", "scenes.id") - f.addWhere("galleries_join.scene_id IS NULL") - case "studio": - f.addWhere("scenes.studio_id IS NULL") - case "movie": - qb.moviesRepository().join(f, "movies_join", "scenes.id") - f.addWhere("movies_join.scene_id IS NULL") - case "performers": - qb.performersRepository().join(f, "performers_join", "scenes.id") - f.addWhere("performers_join.scene_id IS NULL") - case "date": - f.addWhere(`scenes.date IS NULL OR scenes.date IS ""`) - case "tags": - qb.tagsRepository().join(f, "tags_join", "scenes.id") - f.addWhere("tags_join.scene_id IS NULL") - case "stash_id": - qb.stashIDRepository().join(f, "scene_stash_ids", "scenes.id") - f.addWhere("scene_stash_ids.scene_id IS NULL") - case "phash": - qb.addSceneFilesTable(f) - f.addLeftJoin(fingerprintTable, "fingerprints_phash", "scenes_files.file_id = fingerprints_phash.file_id AND fingerprints_phash.type = 'phash'") - f.addWhere("fingerprints_phash.fingerprint IS NULL") - case "cover": - f.addWhere("scenes.cover_blob IS NULL") - default: - f.addWhere("(scenes." + *isMissing + " IS NULL OR TRIM(scenes." 
+ *isMissing + ") = '')") - } - } - } -} - -func sceneURLsCriterionHandler(url *models.StringCriterionInput) criterionHandlerFunc { - h := stringListCriterionHandlerBuilder{ - joinTable: scenesURLsTable, - stringColumn: sceneURLColumn, - addJoinTable: func(f *filterBuilder) { - scenesURLsTableMgr.join(f, "", "scenes.id") - }, - } - - return h.handler(url) -} - -func (qb *SceneStore) getMultiCriterionHandlerBuilder(foreignTable, joinTable, foreignFK string, addJoinsFunc func(f *filterBuilder)) multiCriterionHandlerBuilder { - return multiCriterionHandlerBuilder{ - primaryTable: sceneTable, - foreignTable: foreignTable, - joinTable: joinTable, - primaryFK: sceneIDColumn, - foreignFK: foreignFK, - addJoinsFunc: addJoinsFunc, - } -} - -func sceneCaptionCriterionHandler(qb *SceneStore, captions *models.StringCriterionInput) criterionHandlerFunc { - h := stringListCriterionHandlerBuilder{ - joinTable: videoCaptionsTable, - stringColumn: captionCodeColumn, - addJoinTable: func(f *filterBuilder) { - qb.addSceneFilesTable(f) - f.addLeftJoin(videoCaptionsTable, "", "video_captions.file_id = scenes_files.file_id") - }, - } - - return h.handler(captions) -} - -func sceneTagsCriterionHandler(qb *SceneStore, tags *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { - h := joinedHierarchicalMultiCriterionHandlerBuilder{ - tx: qb.tx, - - primaryTable: sceneTable, - foreignTable: tagTable, - foreignFK: "tag_id", - - relationsTable: "tags_relations", - joinAs: "scene_tag", - joinTable: scenesTagsTable, - primaryFK: sceneIDColumn, - } - - return h.handler(tags) -} - -func sceneTagCountCriterionHandler(qb *SceneStore, tagCount *models.IntCriterionInput) criterionHandlerFunc { - h := countCriterionHandlerBuilder{ - primaryTable: sceneTable, - joinTable: scenesTagsTable, - primaryFK: sceneIDColumn, - } - - return h.handler(tagCount) -} - -func scenePerformersCriterionHandler(qb *SceneStore, performers *models.MultiCriterionInput) criterionHandlerFunc { - h := 
joinedMultiCriterionHandlerBuilder{ - primaryTable: sceneTable, - joinTable: performersScenesTable, - joinAs: "performers_join", - primaryFK: sceneIDColumn, - foreignFK: performerIDColumn, - - addJoinTable: func(f *filterBuilder) { - qb.performersRepository().join(f, "performers_join", "scenes.id") - }, - } - - return h.handler(performers) -} - -func scenePerformerCountCriterionHandler(qb *SceneStore, performerCount *models.IntCriterionInput) criterionHandlerFunc { - h := countCriterionHandlerBuilder{ - primaryTable: sceneTable, - joinTable: performersScenesTable, - primaryFK: sceneIDColumn, - } - - return h.handler(performerCount) -} - -func scenePerformerFavoriteCriterionHandler(performerfavorite *bool) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if performerfavorite != nil { - f.addLeftJoin("performers_scenes", "", "scenes.id = performers_scenes.scene_id") - - if *performerfavorite { - // contains at least one favorite - f.addLeftJoin("performers", "", "performers.id = performers_scenes.performer_id") - f.addWhere("performers.favorite = 1") - } else { - // contains zero favorites - f.addLeftJoin(`(SELECT performers_scenes.scene_id as id FROM performers_scenes -JOIN performers ON performers.id = performers_scenes.performer_id -GROUP BY performers_scenes.scene_id HAVING SUM(performers.favorite) = 0)`, "nofaves", "scenes.id = nofaves.id") - f.addWhere("performers_scenes.scene_id IS NULL OR nofaves.id IS NOT NULL") - } - } - } -} - -func scenePerformerAgeCriterionHandler(performerAge *models.IntCriterionInput) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if performerAge != nil { - f.addInnerJoin("performers_scenes", "", "scenes.id = performers_scenes.scene_id") - f.addInnerJoin("performers", "", "performers_scenes.performer_id = performers.id") - - f.addWhere("scenes.date != '' AND performers.birthdate != ''") - f.addWhere("scenes.date IS NOT NULL AND performers.birthdate IS NOT NULL") - - 
ageCalc := "cast(strftime('%Y.%m%d', scenes.date) - strftime('%Y.%m%d', performers.birthdate) as int)" - whereClause, args := getIntWhereClause(ageCalc, performerAge.Modifier, performerAge.Value, performerAge.Value2) - f.addWhere(whereClause, args...) - } - } -} - -func sceneMoviesCriterionHandler(qb *SceneStore, movies *models.MultiCriterionInput) criterionHandlerFunc { - addJoinsFunc := func(f *filterBuilder) { - qb.moviesRepository().join(f, "", "scenes.id") - f.addLeftJoin("movies", "", "movies_scenes.movie_id = movies.id") - } - h := qb.getMultiCriterionHandlerBuilder(movieTable, moviesScenesTable, "movie_id", addJoinsFunc) - return h.handler(movies) -} - -func sceneGalleriesCriterionHandler(qb *SceneStore, galleries *models.MultiCriterionInput) criterionHandlerFunc { - addJoinsFunc := func(f *filterBuilder) { - qb.galleriesRepository().join(f, "", "scenes.id") - f.addLeftJoin("galleries", "", "scenes_galleries.gallery_id = galleries.id") - } - h := qb.getMultiCriterionHandlerBuilder(galleryTable, scenesGalleriesTable, "gallery_id", addJoinsFunc) - return h.handler(galleries) -} - -func scenePerformerTagsCriterionHandler(qb *SceneStore, tags *models.HierarchicalMultiCriterionInput) criterionHandler { - return &joinedPerformerTagsHandler{ - criterion: tags, - primaryTable: sceneTable, - joinTable: performersScenesTable, - joinPrimaryKey: sceneIDColumn, - } -} - -func scenePhashDistanceCriterionHandler(qb *SceneStore, phashDistance *models.PhashDistanceCriterionInput) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if phashDistance != nil { - qb.addSceneFilesTable(f) - f.addLeftJoin(fingerprintTable, "fingerprints_phash", "scenes_files.file_id = fingerprints_phash.file_id AND fingerprints_phash.type = 'phash'") - - value, _ := utils.StringToPhash(phashDistance.Value) - distance := 0 - if phashDistance.Distance != nil { - distance = *phashDistance.Distance - } - - if distance == 0 { - // use the default handler - 
intCriterionHandler(&models.IntCriterionInput{ - Value: int(value), - Modifier: phashDistance.Modifier, - }, "fingerprints_phash.fingerprint", nil)(ctx, f) - } - - switch { - case phashDistance.Modifier == models.CriterionModifierEquals && distance > 0: - // needed to avoid a type mismatch - f.addWhere("typeof(fingerprints_phash.fingerprint) = 'integer'") - f.addWhere("phash_distance(fingerprints_phash.fingerprint, ?) < ?", value, distance) - case phashDistance.Modifier == models.CriterionModifierNotEquals && distance > 0: - // needed to avoid a type mismatch - f.addWhere("typeof(fingerprints_phash.fingerprint) = 'integer'") - f.addWhere("phash_distance(fingerprints_phash.fingerprint, ?) > ?", value, distance) - default: - intCriterionHandler(&models.IntCriterionInput{ - Value: int(value), - Modifier: phashDistance.Modifier, - }, "fingerprints_phash.fingerprint", nil)(ctx, f) - } - } - } -} - var sceneSortOptions = sortOptions{ "bitrate", "created_at", @@ -1533,6 +1061,7 @@ var sceneSortOptions = sortOptions{ "duration", "file_mod_time", "framerate", + "group_scene_number", "id", "interactive", "interactive_speed", @@ -1599,9 +1128,9 @@ func (qb *SceneStore) setSceneSort(query *queryBuilder, findFilter *models.FindF direction := findFilter.GetDirection() switch sort { - case "movie_scene_number": - query.join(moviesScenesTable, "", "scenes.id = movies_scenes.scene_id") - query.sortAndPagination += getSort("scene_index", direction, moviesScenesTable) + case "movie_scene_number", "group_scene_number": + query.join(groupsScenesTable, "", "scenes.id = groups_scenes.scene_id") + query.sortAndPagination += getSort("scene_index", direction, groupsScenesTable) case "tag_count": query.sortAndPagination += getCountSort(sceneTable, scenesTagsTable, sceneIDColumn, direction) case "performer_count": @@ -1692,6 +1221,30 @@ func (qb *SceneStore) SaveActivity(ctx context.Context, id int, resumeTime *floa return true, nil } +func (qb *SceneStore) ResetActivity(ctx context.Context, 
id int, resetResume bool, resetDuration bool) (bool, error) { + if err := qb.tableMgr.checkIDExists(ctx, id); err != nil { + return false, err + } + + record := goqu.Record{} + + if resetResume { + record["resume_time"] = 0.0 + } + + if resetDuration { + record["play_duration"] = 0.0 + } + + if len(record) > 0 { + if err := qb.tableMgr.updateByID(ctx, id, record); err != nil { + return false, err + } + } + + return true, nil +} + func (qb *SceneStore) GetURLs(ctx context.Context, sceneID int) ([]string, error) { return scenesURLsTableMgr.get(ctx, sceneID) } @@ -1719,7 +1272,7 @@ func (qb *SceneStore) AssignFiles(ctx context.Context, sceneID int, fileIDs []mo } // assign primary only if destination has no files - existingFileIDs, err := qb.filesRepository().get(ctx, sceneID) + existingFileIDs, err := sceneRepository.files.get(ctx, sceneID) if err != nil { return err } @@ -1728,19 +1281,11 @@ func (qb *SceneStore) AssignFiles(ctx context.Context, sceneID int, fileIDs []mo return scenesFilesTableMgr.insertJoins(ctx, sceneID, firstPrimary, fileIDs) } -func (qb *SceneStore) moviesRepository() *repository { - return &repository{ - tx: qb.tx, - tableName: moviesScenesTable, - idColumn: sceneIDColumn, - } -} - -func (qb *SceneStore) GetMovies(ctx context.Context, id int) (ret []models.MoviesScenes, err error) { - ret = []models.MoviesScenes{} +func (qb *SceneStore) GetGroups(ctx context.Context, id int) (ret []models.GroupsScenes, err error) { + ret = []models.GroupsScenes{} - if err := qb.moviesRepository().getAll(ctx, id, func(rows *sqlx.Rows) error { - var ms moviesScenesRow + if err := sceneRepository.groups.getAll(ctx, id, func(rows *sqlx.Rows) error { + var ms groupsScenesRow if err := rows.StructScan(&ms); err != nil { return err } @@ -1754,91 +1299,36 @@ func (qb *SceneStore) GetMovies(ctx context.Context, id int) (ret []models.Movie return ret, nil } -func (qb *SceneStore) filesRepository() *filesRepository { - return &filesRepository{ - repository: repository{ - 
tx: qb.tx, - tableName: scenesFilesTable, - idColumn: sceneIDColumn, - }, - } -} - func (qb *SceneStore) AddFileID(ctx context.Context, id int, fileID models.FileID) error { const firstPrimary = false return scenesFilesTableMgr.insertJoins(ctx, id, firstPrimary, []models.FileID{fileID}) } -func (qb *SceneStore) performersRepository() *joinRepository { - return &joinRepository{ - repository: repository{ - tx: qb.tx, - tableName: performersScenesTable, - idColumn: sceneIDColumn, - }, - fkColumn: performerIDColumn, - } -} - func (qb *SceneStore) GetPerformerIDs(ctx context.Context, id int) ([]int, error) { - return qb.performersRepository().getIDs(ctx, id) -} - -func (qb *SceneStore) tagsRepository() *joinRepository { - return &joinRepository{ - repository: repository{ - tx: qb.tx, - tableName: scenesTagsTable, - idColumn: sceneIDColumn, - }, - fkColumn: tagIDColumn, - foreignTable: tagTable, - orderBy: "tags.name ASC", - } + return sceneRepository.performers.getIDs(ctx, id) } func (qb *SceneStore) GetTagIDs(ctx context.Context, id int) ([]int, error) { - return qb.tagsRepository().getIDs(ctx, id) -} - -func (qb *SceneStore) galleriesRepository() *joinRepository { - return &joinRepository{ - repository: repository{ - tx: qb.tx, - tableName: scenesGalleriesTable, - idColumn: sceneIDColumn, - }, - fkColumn: galleryIDColumn, - } + return sceneRepository.tags.getIDs(ctx, id) } func (qb *SceneStore) GetGalleryIDs(ctx context.Context, id int) ([]int, error) { - return qb.galleriesRepository().getIDs(ctx, id) + return sceneRepository.galleries.getIDs(ctx, id) } func (qb *SceneStore) AddGalleryIDs(ctx context.Context, sceneID int, galleryIDs []int) error { return scenesGalleriesTableMgr.addJoins(ctx, sceneID, galleryIDs) } -func (qb *SceneStore) stashIDRepository() *stashIDRepository { - return &stashIDRepository{ - repository{ - tx: qb.tx, - tableName: "scene_stash_ids", - idColumn: sceneIDColumn, - }, - } -} - func (qb *SceneStore) GetStashIDs(ctx context.Context, sceneID 
int) ([]models.StashID, error) { - return qb.stashIDRepository().get(ctx, sceneID) + return sceneRepository.stashIDs.get(ctx, sceneID) } func (qb *SceneStore) FindDuplicates(ctx context.Context, distance int, durationDiff float64) ([][]*models.Scene, error) { var dupeIds [][]int if distance == 0 { var ids []string - if err := qb.tx.Select(ctx, &ids, findExactDuplicateQuery, durationDiff); err != nil { + if err := dbWrapper.Select(ctx, &ids, findExactDuplicateQuery, durationDiff); err != nil { return nil, err } @@ -1858,7 +1348,7 @@ func (qb *SceneStore) FindDuplicates(ctx context.Context, distance int, duration } else { var hashes []*utils.Phash - if err := qb.queryFunc(ctx, findAllPhashesQuery, nil, false, func(rows *sqlx.Rows) error { + if err := sceneRepository.queryFunc(ctx, findAllPhashesQuery, nil, false, func(rows *sqlx.Rows) error { phash := utils.Phash{ Bucket: -1, Duration: -1, diff --git a/pkg/sqlite/scene_filter.go b/pkg/sqlite/scene_filter.go new file mode 100644 index 00000000000..2e63dad975f --- /dev/null +++ b/pkg/sqlite/scene_filter.go @@ -0,0 +1,568 @@ +package sqlite + +import ( + "context" + "fmt" + + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/utils" +) + +type sceneFilterHandler struct { + sceneFilter *models.SceneFilterType +} + +func (qb *sceneFilterHandler) validate() error { + sceneFilter := qb.sceneFilter + if sceneFilter == nil { + return nil + } + + if err := validateFilterCombination(sceneFilter.OperatorFilter); err != nil { + return err + } + + if subFilter := sceneFilter.SubFilter(); subFilter != nil { + sqb := &sceneFilterHandler{sceneFilter: subFilter} + if err := sqb.validate(); err != nil { + return err + } + } + + return nil +} + +func (qb *sceneFilterHandler) handle(ctx context.Context, f *filterBuilder) { + sceneFilter := qb.sceneFilter + if sceneFilter == nil { + return + } + + if err := qb.validate(); err != nil { + f.setError(err) + return + } + + sf := sceneFilter.SubFilter() + if sf != nil { + 
sub := &sceneFilterHandler{sf} + handleSubFilter(ctx, sub, f, sceneFilter.OperatorFilter) + } + + f.handleCriterion(ctx, qb.criterionHandler()) +} + +func (qb *sceneFilterHandler) criterionHandler() criterionHandler { + sceneFilter := qb.sceneFilter + return compoundHandler{ + intCriterionHandler(sceneFilter.ID, "scenes.id", nil), + pathCriterionHandler(sceneFilter.Path, "folders.path", "files.basename", qb.addFoldersTable), + qb.fileCountCriterionHandler(sceneFilter.FileCount), + stringCriterionHandler(sceneFilter.Title, "scenes.title"), + stringCriterionHandler(sceneFilter.Code, "scenes.code"), + stringCriterionHandler(sceneFilter.Details, "scenes.details"), + stringCriterionHandler(sceneFilter.Director, "scenes.director"), + criterionHandlerFunc(func(ctx context.Context, f *filterBuilder) { + if sceneFilter.Oshash != nil { + qb.addSceneFilesTable(f) + f.addLeftJoin(fingerprintTable, "fingerprints_oshash", "scenes_files.file_id = fingerprints_oshash.file_id AND fingerprints_oshash.type = 'oshash'") + } + + stringCriterionHandler(sceneFilter.Oshash, "fingerprints_oshash.fingerprint")(ctx, f) + }), + + criterionHandlerFunc(func(ctx context.Context, f *filterBuilder) { + if sceneFilter.Checksum != nil { + qb.addSceneFilesTable(f) + f.addLeftJoin(fingerprintTable, "fingerprints_md5", "scenes_files.file_id = fingerprints_md5.file_id AND fingerprints_md5.type = 'md5'") + } + + stringCriterionHandler(sceneFilter.Checksum, "fingerprints_md5.fingerprint")(ctx, f) + }), + + criterionHandlerFunc(func(ctx context.Context, f *filterBuilder) { + if sceneFilter.Phash != nil { + // backwards compatibility + qb.phashDistanceCriterionHandler(&models.PhashDistanceCriterionInput{ + Value: sceneFilter.Phash.Value, + Modifier: sceneFilter.Phash.Modifier, + })(ctx, f) + } + }), + + qb.phashDistanceCriterionHandler(sceneFilter.PhashDistance), + + intCriterionHandler(sceneFilter.Rating100, "scenes.rating", nil), + qb.oCountCriterionHandler(sceneFilter.OCounter), + 
boolCriterionHandler(sceneFilter.Organized, "scenes.organized", nil), + + floatIntCriterionHandler(sceneFilter.Duration, "video_files.duration", qb.addVideoFilesTable), + resolutionCriterionHandler(sceneFilter.Resolution, "video_files.height", "video_files.width", qb.addVideoFilesTable), + orientationCriterionHandler(sceneFilter.Orientation, "video_files.height", "video_files.width", qb.addVideoFilesTable), + floatIntCriterionHandler(sceneFilter.Framerate, "ROUND(video_files.frame_rate)", qb.addVideoFilesTable), + intCriterionHandler(sceneFilter.Bitrate, "video_files.bit_rate", qb.addVideoFilesTable), + qb.codecCriterionHandler(sceneFilter.VideoCodec, "video_files.video_codec", qb.addVideoFilesTable), + qb.codecCriterionHandler(sceneFilter.AudioCodec, "video_files.audio_codec", qb.addVideoFilesTable), + + qb.hasMarkersCriterionHandler(sceneFilter.HasMarkers), + qb.isMissingCriterionHandler(sceneFilter.IsMissing), + qb.urlsCriterionHandler(sceneFilter.URL), + + criterionHandlerFunc(func(ctx context.Context, f *filterBuilder) { + if sceneFilter.StashID != nil { + sceneRepository.stashIDs.join(f, "scene_stash_ids", "scenes.id") + stringCriterionHandler(sceneFilter.StashID, "scene_stash_ids.stash_id")(ctx, f) + } + }), + + &stashIDCriterionHandler{ + c: sceneFilter.StashIDEndpoint, + stashIDRepository: &sceneRepository.stashIDs, + stashIDTableAs: "scene_stash_ids", + parentIDCol: "scenes.id", + }, + + boolCriterionHandler(sceneFilter.Interactive, "video_files.interactive", qb.addVideoFilesTable), + intCriterionHandler(sceneFilter.InteractiveSpeed, "video_files.interactive_speed", qb.addVideoFilesTable), + + qb.captionCriterionHandler(sceneFilter.Captions), + + floatIntCriterionHandler(sceneFilter.ResumeTime, "scenes.resume_time", nil), + floatIntCriterionHandler(sceneFilter.PlayDuration, "scenes.play_duration", nil), + qb.playCountCriterionHandler(sceneFilter.PlayCount), + criterionHandlerFunc(func(ctx context.Context, f *filterBuilder) { + if sceneFilter.LastPlayedAt 
!= nil { + f.addLeftJoin( + fmt.Sprintf("(SELECT %s, MAX(%s) as last_played_at FROM %s GROUP BY %s)", sceneIDColumn, sceneViewDateColumn, scenesViewDatesTable, sceneIDColumn), + "scene_last_view", + fmt.Sprintf("scene_last_view.%s = scenes.id", sceneIDColumn), + ) + h := timestampCriterionHandler{sceneFilter.LastPlayedAt, "IFNULL(last_played_at, datetime(0))", nil} + h.handle(ctx, f) + } + }), + + qb.tagsCriterionHandler(sceneFilter.Tags), + qb.tagCountCriterionHandler(sceneFilter.TagCount), + qb.performersCriterionHandler(sceneFilter.Performers), + qb.performerCountCriterionHandler(sceneFilter.PerformerCount), + studioCriterionHandler(sceneTable, sceneFilter.Studios), + + qb.groupsCriterionHandler(sceneFilter.Groups), + qb.moviesCriterionHandler(sceneFilter.Movies), + + qb.galleriesCriterionHandler(sceneFilter.Galleries), + qb.performerTagsCriterionHandler(sceneFilter.PerformerTags), + qb.performerFavoriteCriterionHandler(sceneFilter.PerformerFavorite), + qb.performerAgeCriterionHandler(sceneFilter.PerformerAge), + qb.phashDuplicatedCriterionHandler(sceneFilter.Duplicated, qb.addSceneFilesTable), + &dateCriterionHandler{sceneFilter.Date, "scenes.date", nil}, + ×tampCriterionHandler{sceneFilter.CreatedAt, "scenes.created_at", nil}, + ×tampCriterionHandler{sceneFilter.UpdatedAt, "scenes.updated_at", nil}, + + &relatedFilterHandler{ + relatedIDCol: "scenes_galleries.gallery_id", + relatedRepo: galleryRepository.repository, + relatedHandler: &galleryFilterHandler{sceneFilter.GalleriesFilter}, + joinFn: func(f *filterBuilder) { + sceneRepository.galleries.innerJoin(f, "", "scenes.id") + }, + }, + + &relatedFilterHandler{ + relatedIDCol: "performers_join.performer_id", + relatedRepo: performerRepository.repository, + relatedHandler: &performerFilterHandler{sceneFilter.PerformersFilter}, + joinFn: func(f *filterBuilder) { + sceneRepository.performers.innerJoin(f, "performers_join", "scenes.id") + }, + }, + + &relatedFilterHandler{ + relatedIDCol: "scenes.studio_id", + 
relatedRepo: studioRepository.repository, + relatedHandler: &studioFilterHandler{sceneFilter.StudiosFilter}, + }, + + &relatedFilterHandler{ + relatedIDCol: "scene_tag.tag_id", + relatedRepo: tagRepository.repository, + relatedHandler: &tagFilterHandler{sceneFilter.TagsFilter}, + joinFn: func(f *filterBuilder) { + sceneRepository.tags.innerJoin(f, "scene_tag", "scenes.id") + }, + }, + + &relatedFilterHandler{ + relatedIDCol: "groups_scenes.group_id", + relatedRepo: groupRepository.repository, + relatedHandler: &groupFilterHandler{sceneFilter.MoviesFilter}, + joinFn: func(f *filterBuilder) { + sceneRepository.groups.innerJoin(f, "", "scenes.id") + }, + }, + + &relatedFilterHandler{ + relatedIDCol: "scene_markers.id", + relatedRepo: sceneMarkerRepository.repository, + relatedHandler: &sceneMarkerFilterHandler{sceneFilter.MarkersFilter}, + joinFn: func(f *filterBuilder) { + f.addInnerJoin("scene_markers", "", "scenes.id") + }, + }, + } +} + +func (qb *sceneFilterHandler) addSceneFilesTable(f *filterBuilder) { + f.addLeftJoin(scenesFilesTable, "", "scenes_files.scene_id = scenes.id") +} + +func (qb *sceneFilterHandler) addFilesTable(f *filterBuilder) { + qb.addSceneFilesTable(f) + f.addLeftJoin(fileTable, "", "scenes_files.file_id = files.id") +} + +func (qb *sceneFilterHandler) addFoldersTable(f *filterBuilder) { + qb.addFilesTable(f) + f.addLeftJoin(folderTable, "", "files.parent_folder_id = folders.id") +} + +func (qb *sceneFilterHandler) addVideoFilesTable(f *filterBuilder) { + qb.addSceneFilesTable(f) + f.addLeftJoin(videoFileTable, "", "video_files.file_id = scenes_files.file_id") +} + +func (qb *sceneFilterHandler) playCountCriterionHandler(count *models.IntCriterionInput) criterionHandlerFunc { + h := countCriterionHandlerBuilder{ + primaryTable: sceneTable, + joinTable: scenesViewDatesTable, + primaryFK: sceneIDColumn, + } + + return h.handler(count) +} + +func (qb *sceneFilterHandler) oCountCriterionHandler(count *models.IntCriterionInput) 
criterionHandlerFunc { + h := countCriterionHandlerBuilder{ + primaryTable: sceneTable, + joinTable: scenesODatesTable, + primaryFK: sceneIDColumn, + } + + return h.handler(count) +} + +func (qb *sceneFilterHandler) fileCountCriterionHandler(fileCount *models.IntCriterionInput) criterionHandlerFunc { + h := countCriterionHandlerBuilder{ + primaryTable: sceneTable, + joinTable: scenesFilesTable, + primaryFK: sceneIDColumn, + } + + return h.handler(fileCount) +} + +func (qb *sceneFilterHandler) phashDuplicatedCriterionHandler(duplicatedFilter *models.PHashDuplicationCriterionInput, addJoinFn func(f *filterBuilder)) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + // TODO: Wishlist item: Implement Distance matching + if duplicatedFilter != nil { + if addJoinFn != nil { + addJoinFn(f) + } + + var v string + if *duplicatedFilter.Duplicated { + v = ">" + } else { + v = "=" + } + + f.addInnerJoin("(SELECT file_id FROM files_fingerprints INNER JOIN (SELECT fingerprint FROM files_fingerprints WHERE type = 'phash' GROUP BY fingerprint HAVING COUNT (fingerprint) "+v+" 1) dupes on files_fingerprints.fingerprint = dupes.fingerprint)", "scph", "scenes_files.file_id = scph.file_id") + } + } +} + +func (qb *sceneFilterHandler) codecCriterionHandler(codec *models.StringCriterionInput, codecColumn string, addJoinFn func(f *filterBuilder)) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if codec != nil { + if addJoinFn != nil { + addJoinFn(f) + } + + stringCriterionHandler(codec, codecColumn)(ctx, f) + } + } +} + +func (qb *sceneFilterHandler) hasMarkersCriterionHandler(hasMarkers *string) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if hasMarkers != nil { + f.addLeftJoin("scene_markers", "", "scene_markers.scene_id = scenes.id") + if *hasMarkers == "true" { + f.addHaving("count(scene_markers.scene_id) > 0") + } else { + f.addWhere("scene_markers.id IS NULL") + } + } + } +} + +func (qb 
*sceneFilterHandler) isMissingCriterionHandler(isMissing *string) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if isMissing != nil && *isMissing != "" { + switch *isMissing { + case "url": + scenesURLsTableMgr.join(f, "", "scenes.id") + f.addWhere("scene_urls.url IS NULL") + case "galleries": + sceneRepository.galleries.join(f, "galleries_join", "scenes.id") + f.addWhere("galleries_join.scene_id IS NULL") + case "studio": + f.addWhere("scenes.studio_id IS NULL") + case "movie": + sceneRepository.groups.join(f, "groups_join", "scenes.id") + f.addWhere("groups_join.scene_id IS NULL") + case "performers": + sceneRepository.performers.join(f, "performers_join", "scenes.id") + f.addWhere("performers_join.scene_id IS NULL") + case "date": + f.addWhere(`scenes.date IS NULL OR scenes.date IS ""`) + case "tags": + sceneRepository.tags.join(f, "tags_join", "scenes.id") + f.addWhere("tags_join.scene_id IS NULL") + case "stash_id": + sceneRepository.stashIDs.join(f, "scene_stash_ids", "scenes.id") + f.addWhere("scene_stash_ids.scene_id IS NULL") + case "phash": + qb.addSceneFilesTable(f) + f.addLeftJoin(fingerprintTable, "fingerprints_phash", "scenes_files.file_id = fingerprints_phash.file_id AND fingerprints_phash.type = 'phash'") + f.addWhere("fingerprints_phash.fingerprint IS NULL") + case "cover": + f.addWhere("scenes.cover_blob IS NULL") + default: + f.addWhere("(scenes." + *isMissing + " IS NULL OR TRIM(scenes." 
+ *isMissing + ") = '')") + } + } + } +} + +func (qb *sceneFilterHandler) urlsCriterionHandler(url *models.StringCriterionInput) criterionHandlerFunc { + h := stringListCriterionHandlerBuilder{ + primaryTable: sceneTable, + primaryFK: sceneIDColumn, + joinTable: scenesURLsTable, + stringColumn: sceneURLColumn, + addJoinTable: func(f *filterBuilder) { + scenesURLsTableMgr.join(f, "", "scenes.id") + }, + } + + return h.handler(url) +} + +func (qb *sceneFilterHandler) getMultiCriterionHandlerBuilder(foreignTable, joinTable, foreignFK string, addJoinsFunc func(f *filterBuilder)) multiCriterionHandlerBuilder { + return multiCriterionHandlerBuilder{ + primaryTable: sceneTable, + foreignTable: foreignTable, + joinTable: joinTable, + primaryFK: sceneIDColumn, + foreignFK: foreignFK, + addJoinsFunc: addJoinsFunc, + } +} + +func (qb *sceneFilterHandler) captionCriterionHandler(captions *models.StringCriterionInput) criterionHandlerFunc { + h := stringListCriterionHandlerBuilder{ + primaryTable: sceneTable, + primaryFK: sceneIDColumn, + joinTable: videoCaptionsTable, + stringColumn: captionCodeColumn, + addJoinTable: func(f *filterBuilder) { + qb.addSceneFilesTable(f) + f.addLeftJoin(videoCaptionsTable, "", "video_captions.file_id = scenes_files.file_id") + }, + excludeHandler: func(f *filterBuilder, criterion *models.StringCriterionInput) { + excludeClause := `scenes.id NOT IN ( + SELECT scenes_files.scene_id from scenes_files + INNER JOIN video_captions on video_captions.file_id = scenes_files.file_id + WHERE video_captions.language_code LIKE ? + )` + f.addWhere(excludeClause, criterion.Value) + + // TODO - should we also exclude null values? 
+ }, + } + + return h.handler(captions) +} + +func (qb *sceneFilterHandler) tagsCriterionHandler(tags *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { + h := joinedHierarchicalMultiCriterionHandlerBuilder{ + primaryTable: sceneTable, + foreignTable: tagTable, + foreignFK: "tag_id", + + relationsTable: "tags_relations", + joinAs: "scene_tag", + joinTable: scenesTagsTable, + primaryFK: sceneIDColumn, + } + + return h.handler(tags) +} + +func (qb *sceneFilterHandler) tagCountCriterionHandler(tagCount *models.IntCriterionInput) criterionHandlerFunc { + h := countCriterionHandlerBuilder{ + primaryTable: sceneTable, + joinTable: scenesTagsTable, + primaryFK: sceneIDColumn, + } + + return h.handler(tagCount) +} + +func (qb *sceneFilterHandler) performersCriterionHandler(performers *models.MultiCriterionInput) criterionHandlerFunc { + h := joinedMultiCriterionHandlerBuilder{ + primaryTable: sceneTable, + joinTable: performersScenesTable, + joinAs: "performers_join", + primaryFK: sceneIDColumn, + foreignFK: performerIDColumn, + + addJoinTable: func(f *filterBuilder) { + sceneRepository.performers.join(f, "performers_join", "scenes.id") + }, + } + + return h.handler(performers) +} + +func (qb *sceneFilterHandler) performerCountCriterionHandler(performerCount *models.IntCriterionInput) criterionHandlerFunc { + h := countCriterionHandlerBuilder{ + primaryTable: sceneTable, + joinTable: performersScenesTable, + primaryFK: sceneIDColumn, + } + + return h.handler(performerCount) +} + +func (qb *sceneFilterHandler) performerFavoriteCriterionHandler(performerfavorite *bool) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if performerfavorite != nil { + f.addLeftJoin("performers_scenes", "", "scenes.id = performers_scenes.scene_id") + + if *performerfavorite { + // contains at least one favorite + f.addLeftJoin("performers", "", "performers.id = performers_scenes.performer_id") + f.addWhere("performers.favorite = 1") + } else { + // 
contains zero favorites + f.addLeftJoin(`(SELECT performers_scenes.scene_id as id FROM performers_scenes +JOIN performers ON performers.id = performers_scenes.performer_id +GROUP BY performers_scenes.scene_id HAVING SUM(performers.favorite) = 0)`, "nofaves", "scenes.id = nofaves.id") + f.addWhere("performers_scenes.scene_id IS NULL OR nofaves.id IS NOT NULL") + } + } + } +} + +func (qb *sceneFilterHandler) performerAgeCriterionHandler(performerAge *models.IntCriterionInput) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if performerAge != nil { + f.addInnerJoin("performers_scenes", "", "scenes.id = performers_scenes.scene_id") + f.addInnerJoin("performers", "", "performers_scenes.performer_id = performers.id") + + f.addWhere("scenes.date != '' AND performers.birthdate != ''") + f.addWhere("scenes.date IS NOT NULL AND performers.birthdate IS NOT NULL") + + ageCalc := "cast(strftime('%Y.%m%d', scenes.date) - strftime('%Y.%m%d', performers.birthdate) as int)" + whereClause, args := getIntWhereClause(ageCalc, performerAge.Modifier, performerAge.Value, performerAge.Value2) + f.addWhere(whereClause, args...) 
+ } + } +} + +// legacy handler +func (qb *sceneFilterHandler) moviesCriterionHandler(movies *models.MultiCriterionInput) criterionHandlerFunc { + addJoinsFunc := func(f *filterBuilder) { + sceneRepository.groups.join(f, "", "scenes.id") + f.addLeftJoin("groups", "", "groups_scenes.group_id = groups.id") + } + h := qb.getMultiCriterionHandlerBuilder(groupTable, groupsScenesTable, "group_id", addJoinsFunc) + return h.handler(movies) +} + +func (qb *sceneFilterHandler) groupsCriterionHandler(groups *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { + h := joinedHierarchicalMultiCriterionHandlerBuilder{ + primaryTable: sceneTable, + foreignTable: groupTable, + foreignFK: "group_id", + + relationsTable: groupRelationsTable, + parentFK: "containing_id", + childFK: "sub_id", + joinAs: "scene_group", + joinTable: groupsScenesTable, + primaryFK: sceneIDColumn, + } + + return h.handler(groups) +} + +func (qb *sceneFilterHandler) galleriesCriterionHandler(galleries *models.MultiCriterionInput) criterionHandlerFunc { + addJoinsFunc := func(f *filterBuilder) { + sceneRepository.galleries.join(f, "", "scenes.id") + f.addLeftJoin("galleries", "", "scenes_galleries.gallery_id = galleries.id") + } + h := qb.getMultiCriterionHandlerBuilder(galleryTable, scenesGalleriesTable, "gallery_id", addJoinsFunc) + return h.handler(galleries) +} + +func (qb *sceneFilterHandler) performerTagsCriterionHandler(tags *models.HierarchicalMultiCriterionInput) criterionHandler { + return &joinedPerformerTagsHandler{ + criterion: tags, + primaryTable: sceneTable, + joinTable: performersScenesTable, + joinPrimaryKey: sceneIDColumn, + } +} + +func (qb *sceneFilterHandler) phashDistanceCriterionHandler(phashDistance *models.PhashDistanceCriterionInput) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if phashDistance != nil { + qb.addSceneFilesTable(f) + f.addLeftJoin(fingerprintTable, "fingerprints_phash", "scenes_files.file_id = fingerprints_phash.file_id AND 
fingerprints_phash.type = 'phash'") + + value, _ := utils.StringToPhash(phashDistance.Value) + distance := 0 + if phashDistance.Distance != nil { + distance = *phashDistance.Distance + } + + if distance == 0 { + // use the default handler + intCriterionHandler(&models.IntCriterionInput{ + Value: int(value), + Modifier: phashDistance.Modifier, + }, "fingerprints_phash.fingerprint", nil)(ctx, f) + } + + switch { + case phashDistance.Modifier == models.CriterionModifierEquals && distance > 0: + // needed to avoid a type mismatch + f.addWhere("typeof(fingerprints_phash.fingerprint) = 'integer'") + f.addWhere("phash_distance(fingerprints_phash.fingerprint, ?) < ?", value, distance) + case phashDistance.Modifier == models.CriterionModifierNotEquals && distance > 0: + // needed to avoid a type mismatch + f.addWhere("typeof(fingerprints_phash.fingerprint) = 'integer'") + f.addWhere("phash_distance(fingerprints_phash.fingerprint, ?) > ?", value, distance) + default: + intCriterionHandler(&models.IntCriterionInput{ + Value: int(value), + Modifier: phashDistance.Modifier, + }, "fingerprints_phash.fingerprint", nil)(ctx, f) + } + } + } +} diff --git a/pkg/sqlite/scene_marker.go b/pkg/sqlite/scene_marker.go index f1221cd0e13..87a849d2084 100644 --- a/pkg/sqlite/scene_marker.go +++ b/pkg/sqlite/scene_marker.go @@ -75,24 +75,41 @@ func (r *sceneMarkerRowRecord) fromPartial(o models.SceneMarkerPartial) { r.setTimestamp("updated_at", o.UpdatedAt) } -type SceneMarkerStore struct { +type sceneMarkerRepositoryType struct { repository - tableMgr *table + scenes repository + tags joinRepository } -func NewSceneMarkerStore() *SceneMarkerStore { - return &SceneMarkerStore{ +var ( + sceneMarkerRepository = sceneMarkerRepositoryType{ repository: repository{ tableName: sceneMarkerTable, idColumn: idColumn, }, - tableMgr: sceneMarkerTableMgr, + scenes: repository{ + tableName: sceneTable, + idColumn: idColumn, + }, + tags: joinRepository{ + repository: repository{ + tableName: 
"scene_markers_tags", + idColumn: "scene_marker_id", + }, + fkColumn: tagIDColumn, + }, } +) + +type SceneMarkerStore struct{} + +func NewSceneMarkerStore() *SceneMarkerStore { + return &SceneMarkerStore{} } func (qb *SceneMarkerStore) table() exp.IdentifierExpression { - return qb.tableMgr.table + return sceneMarkerTableMgr.table } func (qb *SceneMarkerStore) selectDataset() *goqu.SelectDataset { @@ -103,7 +120,7 @@ func (qb *SceneMarkerStore) Create(ctx context.Context, newObject *models.SceneM var r sceneMarkerRow r.fromSceneMarker(*newObject) - id, err := qb.tableMgr.insertID(ctx, r) + id, err := sceneMarkerTableMgr.insertID(ctx, r) if err != nil { return err } @@ -128,7 +145,7 @@ func (qb *SceneMarkerStore) UpdatePartial(ctx context.Context, id int, partial m r.fromPartial(partial) if len(r.Record) > 0 { - if err := qb.tableMgr.updateByID(ctx, id, r.Record); err != nil { + if err := sceneMarkerTableMgr.updateByID(ctx, id, r.Record); err != nil { return nil, err } } @@ -140,7 +157,7 @@ func (qb *SceneMarkerStore) Update(ctx context.Context, updatedObject *models.Sc var r sceneMarkerRow r.fromSceneMarker(*updatedObject) - if err := qb.tableMgr.updateByID(ctx, updatedObject.ID, r); err != nil { + if err := sceneMarkerTableMgr.updateByID(ctx, updatedObject.ID, r); err != nil { return err } @@ -148,7 +165,7 @@ func (qb *SceneMarkerStore) Update(ctx context.Context, updatedObject *models.Sc } func (qb *SceneMarkerStore) Destroy(ctx context.Context, id int) error { - return qb.destroyExisting(ctx, []int{id}) + return sceneMarkerRepository.destroyExisting(ctx, []int{id}) } // returns nil, nil if not found @@ -186,7 +203,7 @@ func (qb *SceneMarkerStore) FindMany(ctx context.Context, ids []int) ([]*models. 
// returns nil, sql.ErrNoRows if not found func (qb *SceneMarkerStore) find(ctx context.Context, id int) (*models.SceneMarker, error) { - q := qb.selectDataset().Where(qb.tableMgr.byID(id)) + q := qb.selectDataset().Where(sceneMarkerTableMgr.byID(id)) ret, err := qb.get(ctx, q) if err != nil { @@ -243,7 +260,7 @@ func (qb *SceneMarkerStore) FindBySceneID(ctx context.Context, sceneID int) ([]* func (qb *SceneMarkerStore) CountByTagID(ctx context.Context, tagID int) (int, error) { args := []interface{}{tagID, tagID} - return qb.runCountQuery(ctx, qb.buildCountQuery(countSceneMarkersForTagQuery), args) + return sceneMarkerRepository.runCountQuery(ctx, sceneMarkerRepository.buildCountQuery(countSceneMarkersForTagQuery), args) } func (qb *SceneMarkerStore) GetMarkerStrings(ctx context.Context, q *string, sort *string) ([]*models.MarkerStringsResultType, error) { @@ -272,21 +289,6 @@ func (qb *SceneMarkerStore) Wall(ctx context.Context, q *string) ([]*models.Scen return qb.getMany(ctx, qq) } -func (qb *SceneMarkerStore) makeFilter(ctx context.Context, sceneMarkerFilter *models.SceneMarkerFilterType) *filterBuilder { - query := &filterBuilder{} - - query.handleCriterion(ctx, sceneMarkerTagIDCriterionHandler(qb, sceneMarkerFilter.TagID)) - query.handleCriterion(ctx, sceneMarkerTagsCriterionHandler(qb, sceneMarkerFilter.Tags)) - query.handleCriterion(ctx, sceneMarkerSceneTagsCriterionHandler(qb, sceneMarkerFilter.SceneTags)) - query.handleCriterion(ctx, sceneMarkerPerformersCriterionHandler(qb, sceneMarkerFilter.Performers)) - query.handleCriterion(ctx, timestampCriterionHandler(sceneMarkerFilter.CreatedAt, "scene_markers.created_at")) - query.handleCriterion(ctx, timestampCriterionHandler(sceneMarkerFilter.UpdatedAt, "scene_markers.updated_at")) - query.handleCriterion(ctx, dateCriterionHandler(sceneMarkerFilter.SceneDate, "scenes.date")) - query.handleCriterion(ctx, timestampCriterionHandler(sceneMarkerFilter.SceneCreatedAt, "scenes.created_at")) - 
query.handleCriterion(ctx, timestampCriterionHandler(sceneMarkerFilter.SceneUpdatedAt, "scenes.updated_at")) - - return query -} func (qb *SceneMarkerStore) makeQuery(ctx context.Context, sceneMarkerFilter *models.SceneMarkerFilterType, findFilter *models.FindFilterType) (*queryBuilder, error) { if sceneMarkerFilter == nil { sceneMarkerFilter = &models.SceneMarkerFilterType{} @@ -295,16 +297,19 @@ func (qb *SceneMarkerStore) makeQuery(ctx context.Context, sceneMarkerFilter *mo findFilter = &models.FindFilterType{} } - query := qb.newQuery() + query := sceneMarkerRepository.newQuery() distinctIDs(&query, sceneMarkerTable) if q := findFilter.Q; q != nil && *q != "" { + query.join(sceneTable, "", "scenes.id = scene_markers.scene_id") query.join(tagTable, "", "scene_markers.primary_tag_id = tags.id") searchColumns := []string{"scene_markers.title", "scenes.title", "tags.name"} query.parseQueryString(searchColumns, *q) } - filter := qb.makeFilter(ctx, sceneMarkerFilter) + filter := filterBuilderFromHandler(ctx, &sceneMarkerFilterHandler{ + sceneMarkerFilter: sceneMarkerFilter, + }) if err := query.addFilter(filter); err != nil { return nil, err @@ -346,135 +351,6 @@ func (qb *SceneMarkerStore) QueryCount(ctx context.Context, sceneMarkerFilter *m return query.executeCount(ctx) } -func sceneMarkerTagIDCriterionHandler(qb *SceneMarkerStore, tagID *string) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if tagID != nil { - f.addLeftJoin("scene_markers_tags", "", "scene_markers_tags.scene_marker_id = scene_markers.id") - - f.addWhere("(scene_markers.primary_tag_id = ? 
OR scene_markers_tags.tag_id = ?)", *tagID, *tagID) - } - } -} - -func sceneMarkerTagsCriterionHandler(qb *SceneMarkerStore, criterion *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if criterion != nil { - tags := criterion.CombineExcludes() - - if tags.Modifier == models.CriterionModifierIsNull || tags.Modifier == models.CriterionModifierNotNull { - var notClause string - if tags.Modifier == models.CriterionModifierNotNull { - notClause = "NOT" - } - - f.addLeftJoin("scene_markers_tags", "", "scene_markers.id = scene_markers_tags.scene_marker_id") - - f.addWhere(fmt.Sprintf("%s scene_markers_tags.tag_id IS NULL", notClause)) - return - } - - if tags.Modifier == models.CriterionModifierEquals && tags.Depth != nil && *tags.Depth != 0 { - f.setError(fmt.Errorf("depth is not supported for equals modifier for marker tag filtering")) - return - } - - if len(tags.Value) == 0 && len(tags.Excludes) == 0 { - return - } - - if len(tags.Value) > 0 { - valuesClause, err := getHierarchicalValues(ctx, qb.tx, tags.Value, tagTable, "tags_relations", "parent_id", "child_id", tags.Depth) - if err != nil { - f.setError(err) - return - } - - f.addWith(`marker_tags AS ( - SELECT mt.scene_marker_id, t.column1 AS root_tag_id FROM scene_markers_tags mt - INNER JOIN (` + valuesClause + `) t ON t.column2 = mt.tag_id - UNION - SELECT m.id, t.column1 FROM scene_markers m - INNER JOIN (` + valuesClause + `) t ON t.column2 = m.primary_tag_id - )`) - - f.addLeftJoin("marker_tags", "", "marker_tags.scene_marker_id = scene_markers.id") - - switch tags.Modifier { - case models.CriterionModifierEquals: - // includes only the provided ids - f.addWhere("marker_tags.root_tag_id IS NOT NULL") - tagsLen := len(tags.Value) - f.addHaving(fmt.Sprintf("count(distinct marker_tags.root_tag_id) IS %d", tagsLen)) - // decrement by one to account for primary tag id - f.addWhere("(SELECT COUNT(*) FROM scene_markers_tags s WHERE 
s.scene_marker_id = scene_markers.id) = ?", tagsLen-1) - case models.CriterionModifierNotEquals: - f.setError(fmt.Errorf("not equals modifier is not supported for scene marker tags")) - default: - addHierarchicalConditionClauses(f, tags, "marker_tags", "root_tag_id") - } - } - - if len(criterion.Excludes) > 0 { - valuesClause, err := getHierarchicalValues(ctx, dbWrapper{}, tags.Excludes, tagTable, "tags_relations", "parent_id", "child_id", tags.Depth) - if err != nil { - f.setError(err) - return - } - - clause := "scene_markers.id NOT IN (SELECT scene_markers_tags.scene_marker_id FROM scene_markers_tags WHERE scene_markers_tags.tag_id IN (SELECT column2 FROM (%s)))" - f.addWhere(fmt.Sprintf(clause, valuesClause)) - - f.addWhere(fmt.Sprintf("scene_markers.primary_tag_id NOT IN (SELECT column2 FROM (%s))", valuesClause)) - } - } - } -} - -func sceneMarkerSceneTagsCriterionHandler(qb *SceneMarkerStore, tags *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if tags != nil { - f.addLeftJoin("scenes_tags", "", "scene_markers.scene_id = scenes_tags.scene_id") - - h := joinedHierarchicalMultiCriterionHandlerBuilder{ - tx: qb.tx, - - primaryTable: "scene_markers", - primaryKey: sceneIDColumn, - foreignTable: tagTable, - foreignFK: tagIDColumn, - - relationsTable: "tags_relations", - joinTable: "scenes_tags", - joinAs: "marker_scenes_tags", - primaryFK: sceneIDColumn, - } - - h.handler(tags).handle(ctx, f) - } - } -} - -func sceneMarkerPerformersCriterionHandler(qb *SceneMarkerStore, performers *models.MultiCriterionInput) criterionHandlerFunc { - h := joinedMultiCriterionHandlerBuilder{ - primaryTable: sceneTable, - joinTable: performersScenesTable, - joinAs: "performers_join", - primaryFK: sceneIDColumn, - foreignFK: performerIDColumn, - - addJoinTable: func(f *filterBuilder) { - f.addLeftJoin(performersScenesTable, "performers_join", "performers_join.scene_id = scene_markers.scene_id") - }, - } - - 
handler := h.handler(performers) - return func(ctx context.Context, f *filterBuilder) { - // Make sure scenes is included, otherwise excludes filter fails - f.addLeftJoin(sceneTable, "", "scenes.id = scene_markers.scene_id") - handler(ctx, f) - } -} - var sceneMarkerSortOptions = sortOptions{ "created_at", "id", @@ -514,7 +390,7 @@ func (qb *SceneMarkerStore) setSceneMarkerSort(query *queryBuilder, findFilter * func (qb *SceneMarkerStore) querySceneMarkers(ctx context.Context, query string, args []interface{}) ([]*models.SceneMarker, error) { const single = false var ret []*models.SceneMarker - if err := qb.queryFunc(ctx, query, args, single, func(r *sqlx.Rows) error { + if err := sceneMarkerRepository.queryFunc(ctx, query, args, single, func(r *sqlx.Rows) error { var f sceneMarkerRow if err := r.StructScan(&f); err != nil { return err @@ -532,7 +408,7 @@ func (qb *SceneMarkerStore) querySceneMarkers(ctx context.Context, query string, } func (qb *SceneMarkerStore) queryMarkerStringsResultType(ctx context.Context, query string, args []interface{}) ([]*models.MarkerStringsResultType, error) { - rows, err := qb.tx.Queryx(ctx, query, args...) + rows, err := dbWrapper.Queryx(ctx, query, args...) 
if err != nil && !errors.Is(err, sql.ErrNoRows) { return nil, err } @@ -554,24 +430,13 @@ func (qb *SceneMarkerStore) queryMarkerStringsResultType(ctx context.Context, qu return markerStrings, nil } -func (qb *SceneMarkerStore) tagsRepository() *joinRepository { - return &joinRepository{ - repository: repository{ - tx: qb.tx, - tableName: "scene_markers_tags", - idColumn: "scene_marker_id", - }, - fkColumn: tagIDColumn, - } -} - func (qb *SceneMarkerStore) GetTagIDs(ctx context.Context, id int) ([]int, error) { - return qb.tagsRepository().getIDs(ctx, id) + return sceneMarkerRepository.tags.getIDs(ctx, id) } func (qb *SceneMarkerStore) UpdateTags(ctx context.Context, id int, tagIDs []int) error { // Delete the existing joins and then create new ones - return qb.tagsRepository().replace(ctx, id, tagIDs) + return sceneMarkerRepository.tags.replace(ctx, id, tagIDs) } func (qb *SceneMarkerStore) Count(ctx context.Context) (int, error) { diff --git a/pkg/sqlite/scene_marker_filter.go b/pkg/sqlite/scene_marker_filter.go new file mode 100644 index 00000000000..d5e044e85a7 --- /dev/null +++ b/pkg/sqlite/scene_marker_filter.go @@ -0,0 +1,205 @@ +package sqlite + +import ( + "context" + "fmt" + + "github.com/stashapp/stash/pkg/models" +) + +type sceneMarkerFilterHandler struct { + sceneMarkerFilter *models.SceneMarkerFilterType +} + +func (qb *sceneMarkerFilterHandler) validate() error { + return nil +} + +func (qb *sceneMarkerFilterHandler) handle(ctx context.Context, f *filterBuilder) { + sceneMarkerFilter := qb.sceneMarkerFilter + if sceneMarkerFilter == nil { + return + } + + if err := qb.validate(); err != nil { + f.setError(err) + return + } + + f.handleCriterion(ctx, qb.criterionHandler()) +} + +func (qb *sceneMarkerFilterHandler) joinScenes(f *filterBuilder) { + sceneMarkerRepository.scenes.innerJoin(f, "", "scene_markers.scene_id") +} + +func (qb *sceneMarkerFilterHandler) criterionHandler() criterionHandler { + sceneMarkerFilter := qb.sceneMarkerFilter + return 
compoundHandler{ + qb.tagIDCriterionHandler(sceneMarkerFilter.TagID), + qb.tagsCriterionHandler(sceneMarkerFilter.Tags), + qb.sceneTagsCriterionHandler(sceneMarkerFilter.SceneTags), + qb.performersCriterionHandler(sceneMarkerFilter.Performers), + qb.scenesCriterionHandler(sceneMarkerFilter.Scenes), + ×tampCriterionHandler{sceneMarkerFilter.CreatedAt, "scene_markers.created_at", nil}, + ×tampCriterionHandler{sceneMarkerFilter.UpdatedAt, "scene_markers.updated_at", nil}, + &dateCriterionHandler{sceneMarkerFilter.SceneDate, "scenes.date", qb.joinScenes}, + ×tampCriterionHandler{sceneMarkerFilter.SceneCreatedAt, "scenes.created_at", qb.joinScenes}, + ×tampCriterionHandler{sceneMarkerFilter.SceneUpdatedAt, "scenes.updated_at", qb.joinScenes}, + + &relatedFilterHandler{ + relatedIDCol: "scenes.id", + relatedRepo: sceneRepository.repository, + relatedHandler: &sceneFilterHandler{sceneMarkerFilter.SceneFilter}, + joinFn: func(f *filterBuilder) { + qb.joinScenes(f) + }, + }, + } +} + +func (qb *sceneMarkerFilterHandler) tagIDCriterionHandler(tagID *string) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if tagID != nil { + f.addLeftJoin("scene_markers_tags", "", "scene_markers_tags.scene_marker_id = scene_markers.id") + + f.addWhere("(scene_markers.primary_tag_id = ? 
OR scene_markers_tags.tag_id = ?)", *tagID, *tagID) + } + } +} + +func (qb *sceneMarkerFilterHandler) tagsCriterionHandler(criterion *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if criterion != nil { + tags := criterion.CombineExcludes() + + if tags.Modifier == models.CriterionModifierIsNull || tags.Modifier == models.CriterionModifierNotNull { + var notClause string + if tags.Modifier == models.CriterionModifierNotNull { + notClause = "NOT" + } + + f.addLeftJoin("scene_markers_tags", "", "scene_markers.id = scene_markers_tags.scene_marker_id") + + f.addWhere(fmt.Sprintf("%s scene_markers_tags.tag_id IS NULL", notClause)) + return + } + + if tags.Modifier == models.CriterionModifierEquals && tags.Depth != nil && *tags.Depth != 0 { + f.setError(fmt.Errorf("depth is not supported for equals modifier for marker tag filtering")) + return + } + + if len(tags.Value) == 0 && len(tags.Excludes) == 0 { + return + } + + if len(tags.Value) > 0 { + valuesClause, err := getHierarchicalValues(ctx, tags.Value, tagTable, "tags_relations", "parent_id", "child_id", tags.Depth) + if err != nil { + f.setError(err) + return + } + + f.addWith(`marker_tags AS ( + SELECT mt.scene_marker_id, t.column1 AS root_tag_id FROM scene_markers_tags mt + INNER JOIN (` + valuesClause + `) t ON t.column2 = mt.tag_id + UNION + SELECT m.id, t.column1 FROM scene_markers m + INNER JOIN (` + valuesClause + `) t ON t.column2 = m.primary_tag_id + )`) + + f.addLeftJoin("marker_tags", "", "marker_tags.scene_marker_id = scene_markers.id") + + switch tags.Modifier { + case models.CriterionModifierEquals: + // includes only the provided ids + f.addWhere("marker_tags.root_tag_id IS NOT NULL") + tagsLen := len(tags.Value) + f.addHaving(fmt.Sprintf("count(distinct marker_tags.root_tag_id) IS %d", tagsLen)) + // decrement by one to account for primary tag id + f.addWhere("(SELECT COUNT(*) FROM scene_markers_tags s WHERE s.scene_marker_id = 
scene_markers.id) = ?", tagsLen-1) + case models.CriterionModifierNotEquals: + f.setError(fmt.Errorf("not equals modifier is not supported for scene marker tags")) + default: + addHierarchicalConditionClauses(f, tags, "marker_tags", "root_tag_id") + } + } + + if len(criterion.Excludes) > 0 { + valuesClause, err := getHierarchicalValues(ctx, tags.Excludes, tagTable, "tags_relations", "parent_id", "child_id", tags.Depth) + if err != nil { + f.setError(err) + return + } + + clause := "scene_markers.id NOT IN (SELECT scene_markers_tags.scene_marker_id FROM scene_markers_tags WHERE scene_markers_tags.tag_id IN (SELECT column2 FROM (%s)))" + f.addWhere(fmt.Sprintf(clause, valuesClause)) + + f.addWhere(fmt.Sprintf("scene_markers.primary_tag_id NOT IN (SELECT column2 FROM (%s))", valuesClause)) + } + } + } +} + +func (qb *sceneMarkerFilterHandler) sceneTagsCriterionHandler(tags *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if tags != nil { + f.addLeftJoin("scenes_tags", "", "scene_markers.scene_id = scenes_tags.scene_id") + + h := joinedHierarchicalMultiCriterionHandlerBuilder{ + primaryTable: "scene_markers", + primaryKey: sceneIDColumn, + foreignTable: tagTable, + foreignFK: tagIDColumn, + + relationsTable: "tags_relations", + joinTable: "scenes_tags", + joinAs: "marker_scenes_tags", + primaryFK: sceneIDColumn, + } + + h.handler(tags).handle(ctx, f) + } + } +} + +func (qb *sceneMarkerFilterHandler) performersCriterionHandler(performers *models.MultiCriterionInput) criterionHandlerFunc { + h := joinedMultiCriterionHandlerBuilder{ + primaryTable: sceneTable, + joinTable: performersScenesTable, + joinAs: "performers_join", + primaryFK: sceneIDColumn, + foreignFK: performerIDColumn, + + addJoinTable: func(f *filterBuilder) { + f.addLeftJoin(performersScenesTable, "performers_join", "performers_join.scene_id = scene_markers.scene_id") + }, + } + + handler := h.handler(performers) + return func(ctx 
context.Context, f *filterBuilder) { + if performers == nil { + return + } + + // Make sure scenes is included, otherwise excludes filter fails + qb.joinScenes(f) + handler(ctx, f) + } +} + +func (qb *sceneMarkerFilterHandler) scenesCriterionHandler(scenes *models.MultiCriterionInput) criterionHandlerFunc { + addJoinsFunc := func(f *filterBuilder) { + f.addLeftJoin(sceneTable, "markers_scenes", "markers_scenes.id = scene_markers.scene_id") + } + h := multiCriterionHandlerBuilder{ + primaryTable: sceneMarkerTable, + foreignTable: "markers_scenes", + joinTable: "", + primaryFK: sceneIDColumn, + foreignFK: sceneIDColumn, + addJoinsFunc: addJoinsFunc, + } + return h.handler(scenes) +} diff --git a/pkg/sqlite/scene_marker_test.go b/pkg/sqlite/scene_marker_test.go index fffd0b88f08..0a8343a8bfc 100644 --- a/pkg/sqlite/scene_marker_test.go +++ b/pkg/sqlite/scene_marker_test.go @@ -74,6 +74,27 @@ func TestMarkerCountByTagID(t *testing.T) { }) } +func TestMarkerQueryQ(t *testing.T) { + withTxn(func(ctx context.Context) error { + q := getSceneTitle(sceneIdxWithMarkers) + m, _, err := db.SceneMarker.Query(ctx, nil, &models.FindFilterType{ + Q: &q, + }) + + if err != nil { + t.Errorf("Error querying scene markers: %s", err.Error()) + } + + if !assert.Greater(t, len(m), 0) { + return nil + } + + assert.Equal(t, sceneIDs[sceneIdxWithMarkers], m[0].SceneID) + + return nil + }) +} + func TestMarkerQuerySortBySceneUpdated(t *testing.T) { withTxn(func(ctx context.Context) error { sort := "scenes_updated_at" diff --git a/pkg/sqlite/scene_test.go b/pkg/sqlite/scene_test.go index 942a12591f4..a3174d7278d 100644 --- a/pkg/sqlite/scene_test.go +++ b/pkg/sqlite/scene_test.go @@ -16,6 +16,7 @@ import ( "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/sliceutil" + "github.com/stashapp/stash/pkg/sliceutil/intslice" "github.com/stretchr/testify/assert" ) @@ -41,8 +42,8 @@ func loadSceneRelationships(ctx context.Context, expected models.Scene, actual * return err } } - if 
expected.Movies.Loaded() { - if err := actual.LoadMovies(ctx, db.Scene); err != nil { + if expected.Groups.Loaded() { + if err := actual.LoadGroups(ctx, db.Scene); err != nil { return err } } @@ -120,13 +121,13 @@ func Test_sceneQueryBuilder_Create(t *testing.T) { GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithScene]}), TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithDupName], tagIDs[tagIdx1WithScene]}), PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithScene], performerIDs[performerIdx1WithDupName]}), - Movies: models.NewRelatedMovies([]models.MoviesScenes{ + Groups: models.NewRelatedGroups([]models.GroupsScenes{ { - MovieID: movieIDs[movieIdxWithScene], + GroupID: groupIDs[groupIdxWithScene], SceneIndex: &sceneIndex, }, { - MovieID: movieIDs[movieIdxWithStudio], + GroupID: groupIDs[groupIdxWithStudio], SceneIndex: &sceneIndex2, }, }), @@ -165,13 +166,13 @@ func Test_sceneQueryBuilder_Create(t *testing.T) { GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithScene]}), TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithDupName], tagIDs[tagIdx1WithScene]}), PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithScene], performerIDs[performerIdx1WithDupName]}), - Movies: models.NewRelatedMovies([]models.MoviesScenes{ + Groups: models.NewRelatedGroups([]models.GroupsScenes{ { - MovieID: movieIDs[movieIdxWithScene], + GroupID: groupIDs[groupIdxWithScene], SceneIndex: &sceneIndex, }, { - MovieID: movieIDs[movieIdxWithStudio], + GroupID: groupIDs[groupIdxWithStudio], SceneIndex: &sceneIndex2, }, }), @@ -219,11 +220,11 @@ func Test_sceneQueryBuilder_Create(t *testing.T) { true, }, { - "invalid movie id", + "invalid group id", models.Scene{ - Movies: models.NewRelatedMovies([]models.MoviesScenes{ + Groups: models.NewRelatedGroups([]models.GroupsScenes{ { - MovieID: invalidID, + GroupID: invalidID, SceneIndex: &sceneIndex, }, }), @@ -349,13 +350,13 @@ func Test_sceneQueryBuilder_Update(t *testing.T) { 
GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithScene]}), TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithDupName], tagIDs[tagIdx1WithScene]}), PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithScene], performerIDs[performerIdx1WithDupName]}), - Movies: models.NewRelatedMovies([]models.MoviesScenes{ + Groups: models.NewRelatedGroups([]models.GroupsScenes{ { - MovieID: movieIDs[movieIdxWithScene], + GroupID: groupIDs[groupIdxWithScene], SceneIndex: &sceneIndex, }, { - MovieID: movieIDs[movieIdxWithStudio], + GroupID: groupIDs[groupIdxWithStudio], SceneIndex: &sceneIndex2, }, }), @@ -381,7 +382,7 @@ func Test_sceneQueryBuilder_Update(t *testing.T) { GalleryIDs: models.NewRelatedIDs([]int{}), TagIDs: models.NewRelatedIDs([]int{}), PerformerIDs: models.NewRelatedIDs([]int{}), - Movies: models.NewRelatedMovies([]models.MoviesScenes{}), + Groups: models.NewRelatedGroups([]models.GroupsScenes{}), StashIDs: models.NewRelatedStashIDs([]models.StashID{}), }, false, @@ -411,10 +412,10 @@ func Test_sceneQueryBuilder_Update(t *testing.T) { false, }, { - "clear movies", + "clear groups", &models.Scene{ - ID: sceneIDs[sceneIdxWithMovie], - Movies: models.NewRelatedMovies([]models.MoviesScenes{}), + ID: sceneIDs[sceneIdxWithGroup], + Groups: models.NewRelatedGroups([]models.GroupsScenes{}), }, false, }, @@ -451,12 +452,12 @@ func Test_sceneQueryBuilder_Update(t *testing.T) { true, }, { - "invalid movie id", + "invalid group id", &models.Scene{ ID: sceneIDs[sceneIdxWithSpacedName], - Movies: models.NewRelatedMovies([]models.MoviesScenes{ + Groups: models.NewRelatedGroups([]models.GroupsScenes{ { - MovieID: invalidID, + GroupID: invalidID, SceneIndex: &sceneIndex, }, }), @@ -573,14 +574,14 @@ func Test_sceneQueryBuilder_UpdatePartial(t *testing.T) { IDs: []int{performerIDs[performerIdx1WithScene], performerIDs[performerIdx1WithDupName]}, Mode: models.RelationshipUpdateModeSet, }, - MovieIDs: &models.UpdateMovieIDs{ - Movies: 
[]models.MoviesScenes{ + GroupIDs: &models.UpdateGroupIDs{ + Groups: []models.GroupsScenes{ { - MovieID: movieIDs[movieIdxWithScene], + GroupID: groupIDs[groupIdxWithScene], SceneIndex: &sceneIndex, }, { - MovieID: movieIDs[movieIdxWithStudio], + GroupID: groupIDs[groupIdxWithStudio], SceneIndex: &sceneIndex2, }, }, @@ -621,13 +622,13 @@ func Test_sceneQueryBuilder_UpdatePartial(t *testing.T) { GalleryIDs: models.NewRelatedIDs([]int{galleryIDs[galleryIdxWithScene]}), TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithDupName], tagIDs[tagIdx1WithScene]}), PerformerIDs: models.NewRelatedIDs([]int{performerIDs[performerIdx1WithScene], performerIDs[performerIdx1WithDupName]}), - Movies: models.NewRelatedMovies([]models.MoviesScenes{ + Groups: models.NewRelatedGroups([]models.GroupsScenes{ { - MovieID: movieIDs[movieIdxWithScene], + GroupID: groupIDs[groupIdxWithScene], SceneIndex: &sceneIndex, }, { - MovieID: movieIDs[movieIdxWithStudio], + GroupID: groupIDs[groupIdxWithStudio], SceneIndex: &sceneIndex2, }, }), @@ -658,7 +659,7 @@ func Test_sceneQueryBuilder_UpdatePartial(t *testing.T) { GalleryIDs: models.NewRelatedIDs([]int{}), TagIDs: models.NewRelatedIDs([]int{}), PerformerIDs: models.NewRelatedIDs([]int{}), - Movies: models.NewRelatedMovies([]models.MoviesScenes{}), + Groups: models.NewRelatedGroups([]models.GroupsScenes{}), StashIDs: models.NewRelatedStashIDs([]models.StashID{}), PlayDuration: getScenePlayDuration(sceneIdxWithSpacedName), ResumeTime: getSceneResumeTime(sceneIdxWithSpacedName), @@ -727,13 +728,13 @@ func Test_sceneQueryBuilder_UpdatePartialRelationships(t *testing.T) { stashID1 = "stashid1" stashID2 = "stashid2" - movieScenes = []models.MoviesScenes{ + groupScenes = []models.GroupsScenes{ { - MovieID: movieIDs[movieIdxWithDupName], + GroupID: groupIDs[groupIdxWithDupName], SceneIndex: &sceneIndex, }, { - MovieID: movieIDs[movieIdxWithStudio], + GroupID: groupIDs[groupIdxWithStudio], SceneIndex: &sceneIndex2, }, } @@ -863,40 +864,40 @@ func 
Test_sceneQueryBuilder_UpdatePartialRelationships(t *testing.T) { false, }, { - "add movies", - sceneIDs[sceneIdxWithMovie], + "add groups", + sceneIDs[sceneIdxWithGroup], models.ScenePartial{ - MovieIDs: &models.UpdateMovieIDs{ - Movies: movieScenes, + GroupIDs: &models.UpdateGroupIDs{ + Groups: groupScenes, Mode: models.RelationshipUpdateModeAdd, }, }, models.Scene{ - Movies: models.NewRelatedMovies(append([]models.MoviesScenes{ + Groups: models.NewRelatedGroups(append([]models.GroupsScenes{ { - MovieID: indexesToIDs(movieIDs, sceneMovies[sceneIdxWithMovie])[0], + GroupID: indexesToIDs(groupIDs, sceneGroups[sceneIdxWithGroup])[0], }, - }, movieScenes...)), + }, groupScenes...)), }, false, }, { - "add movies to empty", + "add groups to empty", sceneIDs[sceneIdx1WithPerformer], models.ScenePartial{ - MovieIDs: &models.UpdateMovieIDs{ - Movies: movieScenes, + GroupIDs: &models.UpdateGroupIDs{ + Groups: groupScenes, Mode: models.RelationshipUpdateModeAdd, }, }, models.Scene{ - Movies: models.NewRelatedMovies([]models.MoviesScenes{ + Groups: models.NewRelatedGroups([]models.GroupsScenes{ { - MovieID: movieIDs[movieIdxWithDupName], + GroupID: groupIDs[groupIdxWithDupName], SceneIndex: &sceneIndex, }, { - MovieID: movieIDs[movieIdxWithStudio], + GroupID: groupIDs[groupIdxWithStudio], SceneIndex: &sceneIndex2, }, }), @@ -967,27 +968,27 @@ func Test_sceneQueryBuilder_UpdatePartialRelationships(t *testing.T) { false, }, { - "add duplicate movies", - sceneIDs[sceneIdxWithMovie], + "add duplicate groups", + sceneIDs[sceneIdxWithGroup], models.ScenePartial{ - MovieIDs: &models.UpdateMovieIDs{ - Movies: append([]models.MoviesScenes{ + GroupIDs: &models.UpdateGroupIDs{ + Groups: append([]models.GroupsScenes{ { - MovieID: movieIDs[movieIdxWithScene], + GroupID: groupIDs[groupIdxWithScene], SceneIndex: &sceneIndex, }, }, - movieScenes..., + groupScenes..., ), Mode: models.RelationshipUpdateModeAdd, }, }, models.Scene{ - Movies: 
models.NewRelatedMovies(append([]models.MoviesScenes{ + Groups: models.NewRelatedGroups(append([]models.GroupsScenes{ { - MovieID: indexesToIDs(movieIDs, sceneMovies[sceneIdxWithMovie])[0], + GroupID: indexesToIDs(groupIDs, sceneGroups[sceneIdxWithGroup])[0], }, - }, movieScenes...)), + }, groupScenes...)), }, false, }, @@ -1044,13 +1045,13 @@ func Test_sceneQueryBuilder_UpdatePartialRelationships(t *testing.T) { true, }, { - "add invalid movies", - sceneIDs[sceneIdxWithMovie], + "add invalid groups", + sceneIDs[sceneIdxWithGroup], models.ScenePartial{ - MovieIDs: &models.UpdateMovieIDs{ - Movies: []models.MoviesScenes{ + GroupIDs: &models.UpdateGroupIDs{ + Groups: []models.GroupsScenes{ { - MovieID: invalidID, + GroupID: invalidID, }, }, Mode: models.RelationshipUpdateModeAdd, @@ -1102,20 +1103,20 @@ func Test_sceneQueryBuilder_UpdatePartialRelationships(t *testing.T) { false, }, { - "remove movies", - sceneIDs[sceneIdxWithMovie], + "remove groups", + sceneIDs[sceneIdxWithGroup], models.ScenePartial{ - MovieIDs: &models.UpdateMovieIDs{ - Movies: []models.MoviesScenes{ + GroupIDs: &models.UpdateGroupIDs{ + Groups: []models.GroupsScenes{ { - MovieID: movieIDs[movieIdxWithScene], + GroupID: groupIDs[groupIdxWithScene], }, }, Mode: models.RelationshipUpdateModeRemove, }, }, models.Scene{ - Movies: models.NewRelatedMovies([]models.MoviesScenes{}), + Groups: models.NewRelatedGroups([]models.GroupsScenes{}), }, false, }, @@ -1176,22 +1177,22 @@ func Test_sceneQueryBuilder_UpdatePartialRelationships(t *testing.T) { false, }, { - "remove unrelated movies", - sceneIDs[sceneIdxWithMovie], + "remove unrelated groups", + sceneIDs[sceneIdxWithGroup], models.ScenePartial{ - MovieIDs: &models.UpdateMovieIDs{ - Movies: []models.MoviesScenes{ + GroupIDs: &models.UpdateGroupIDs{ + Groups: []models.GroupsScenes{ { - MovieID: movieIDs[movieIdxWithDupName], + GroupID: groupIDs[groupIdxWithDupName], }, }, Mode: models.RelationshipUpdateModeRemove, }, }, models.Scene{ - Movies: 
models.NewRelatedMovies([]models.MoviesScenes{ + Groups: models.NewRelatedGroups([]models.GroupsScenes{ { - MovieID: indexesToIDs(movieIDs, sceneMovies[sceneIdxWithMovie])[0], + GroupID: indexesToIDs(groupIDs, sceneGroups[sceneIdxWithGroup])[0], }, }), }, @@ -1257,9 +1258,9 @@ func Test_sceneQueryBuilder_UpdatePartialRelationships(t *testing.T) { assert.ElementsMatch(tt.want.GalleryIDs.List(), got.GalleryIDs.List()) assert.ElementsMatch(tt.want.GalleryIDs.List(), s.GalleryIDs.List()) } - if tt.partial.MovieIDs != nil { - assert.ElementsMatch(tt.want.Movies.List(), got.Movies.List()) - assert.ElementsMatch(tt.want.Movies.List(), s.Movies.List()) + if tt.partial.GroupIDs != nil { + assert.ElementsMatch(tt.want.Groups.List(), got.Groups.List()) + assert.ElementsMatch(tt.want.Groups.List(), s.Groups.List()) } if tt.partial.StashIDs != nil { assert.ElementsMatch(tt.want.StashIDs.List(), got.StashIDs.List()) @@ -1467,9 +1468,9 @@ func Test_sceneQueryBuilder_Find(t *testing.T) { false, }, { - "with movies", - sceneIDs[sceneIdxWithMovie], - makeSceneWithID(sceneIdxWithMovie), + "with groups", + sceneIDs[sceneIdxWithGroup], + makeSceneWithID(sceneIdxWithGroup), false, }, } @@ -1527,13 +1528,13 @@ func Test_sceneQueryBuilder_FindMany(t *testing.T) { sceneIDs[sceneIdxWithGallery], sceneIDs[sceneIdxWithTwoPerformers], sceneIDs[sceneIdxWithTwoTags], - sceneIDs[sceneIdxWithMovie], + sceneIDs[sceneIdxWithGroup], }, []*models.Scene{ makeSceneWithID(sceneIdxWithGallery), makeSceneWithID(sceneIdxWithTwoPerformers), makeSceneWithID(sceneIdxWithTwoTags), - makeSceneWithID(sceneIdxWithMovie), + makeSceneWithID(sceneIdxWithGroup), }, false, }, @@ -1608,9 +1609,9 @@ func Test_sceneQueryBuilder_FindByChecksum(t *testing.T) { false, }, { - "with movies", - getChecksum(sceneIdxWithMovie), - []*models.Scene{makeSceneWithID(sceneIdxWithMovie)}, + "with groups", + getChecksum(sceneIdxWithGroup), + []*models.Scene{makeSceneWithID(sceneIdxWithGroup)}, false, }, } @@ -1678,9 +1679,9 @@ func 
Test_sceneQueryBuilder_FindByOSHash(t *testing.T) { false, }, { - "with movies", - getOSHash(sceneIdxWithMovie), - []*models.Scene{makeSceneWithID(sceneIdxWithMovie)}, + "with groups", + getOSHash(sceneIdxWithGroup), + []*models.Scene{makeSceneWithID(sceneIdxWithGroup)}, false, }, } @@ -1749,9 +1750,9 @@ func Test_sceneQueryBuilder_FindByPath(t *testing.T) { false, }, { - "with movies", - getPath(sceneIdxWithMovie), - []*models.Scene{makeSceneWithID(sceneIdxWithMovie)}, + "with groups", + getPath(sceneIdxWithGroup), + []*models.Scene{makeSceneWithID(sceneIdxWithGroup)}, false, }, } @@ -2107,7 +2108,7 @@ func TestSceneQuery(t *testing.T) { }, }, []int{sceneIdxWithGallery}, - []int{sceneIdxWithMovie}, + []int{sceneIdxWithGroup}, false, }, { @@ -2120,7 +2121,7 @@ func TestSceneQuery(t *testing.T) { }, }, []int{sceneIdxWithGallery}, - []int{sceneIdxWithMovie}, + []int{sceneIdxWithGroup}, false, }, // { @@ -2133,7 +2134,7 @@ func TestSceneQuery(t *testing.T) { // }, // }, // []int{sceneIdxWithGallery}, - // []int{sceneIdxWithMovie}, + // []int{sceneIdxWithGroup}, // false, // }, { @@ -2217,7 +2218,7 @@ func TestSceneQuery(t *testing.T) { }, }) if (err != nil) != tt.wantErr { - t.Errorf("PerformerStore.Query() error = %v, wantErr %v", err, tt.wantErr) + t.Errorf("SceneStore.Query() error = %v, wantErr %v", err, tt.wantErr) return } @@ -2411,10 +2412,12 @@ func TestSceneQueryPathOr(t *testing.T) { Value: scene1Path, Modifier: models.CriterionModifierEquals, }, - Or: &models.SceneFilterType{ - Path: &models.StringCriterionInput{ - Value: scene2Path, - Modifier: models.CriterionModifierEquals, + OperatorFilter: models.OperatorFilter[models.SceneFilterType]{ + Or: &models.SceneFilterType{ + Path: &models.StringCriterionInput{ + Value: scene2Path, + Modifier: models.CriterionModifierEquals, + }, }, }, } @@ -2444,10 +2447,12 @@ func TestSceneQueryPathAndRating(t *testing.T) { Value: scenePath, Modifier: models.CriterionModifierEquals, }, - And: &models.SceneFilterType{ - 
Rating100: &models.IntCriterionInput{ - Value: sceneRating, - Modifier: models.CriterionModifierEquals, + OperatorFilter: models.OperatorFilter[models.SceneFilterType]{ + And: &models.SceneFilterType{ + Rating100: &models.IntCriterionInput{ + Value: sceneRating, + Modifier: models.CriterionModifierEquals, + }, }, }, } @@ -2484,8 +2489,10 @@ func TestSceneQueryPathNotRating(t *testing.T) { sceneFilter := models.SceneFilterType{ Path: &pathCriterion, - Not: &models.SceneFilterType{ - Rating100: &ratingCriterion, + OperatorFilter: models.OperatorFilter[models.SceneFilterType]{ + Not: &models.SceneFilterType{ + Rating100: &ratingCriterion, + }, }, } @@ -2516,8 +2523,10 @@ func TestSceneIllegalQuery(t *testing.T) { } sceneFilter := &models.SceneFilterType{ - And: &subFilter, - Or: &subFilter, + OperatorFilter: models.OperatorFilter[models.SceneFilterType]{ + And: &subFilter, + Or: &subFilter, + }, } withTxn(func(ctx context.Context) error { @@ -3100,7 +3109,7 @@ func TestSceneQueryIsMissingMovies(t *testing.T) { IsMissing: &isMissing, } - q := getSceneStringValue(sceneIdxWithMovie, titleField) + q := getSceneStringValue(sceneIdxWithGroup, titleField) findFilter := models.FindFilterType{ Q: &q, } @@ -3114,7 +3123,7 @@ func TestSceneQueryIsMissingMovies(t *testing.T) { // ensure non of the ids equal the one with movies for _, scene := range scenes { - assert.NotEqual(t, sceneIDs[sceneIdxWithMovie], scene.ID) + assert.NotEqual(t, sceneIDs[sceneIdxWithGroup], scene.ID) } return nil @@ -3865,12 +3874,106 @@ func TestSceneQueryStudioDepth(t *testing.T) { }) } +func TestSceneGroups(t *testing.T) { + type criterion struct { + valueIdxs []int + modifier models.CriterionModifier + depth int + } + + tests := []struct { + name string + c criterion + q string + includeIdxs []int + excludeIdxs []int + }{ + { + "includes", + criterion{ + []int{groupIdxWithScene}, + models.CriterionModifierIncludes, + 0, + }, + "", + []int{sceneIdxWithGroup}, + nil, + }, + { + "excludes", + criterion{ 
+ []int{groupIdxWithScene}, + models.CriterionModifierExcludes, + 0, + }, + getSceneStringValue(sceneIdxWithGroup, titleField), + nil, + []int{sceneIdxWithGroup}, + }, + { + "includes (depth = 1)", + criterion{ + []int{groupIdxWithChildWithScene}, + models.CriterionModifierIncludes, + 1, + }, + "", + []int{sceneIdxWithGroupWithParent}, + nil, + }, + } + + for _, tt := range tests { + valueIDs := indexesToIDs(groupIDs, tt.c.valueIdxs) + + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + assert := assert.New(t) + + sceneFilter := &models.SceneFilterType{ + Groups: &models.HierarchicalMultiCriterionInput{ + Value: intslice.IntSliceToStringSlice(valueIDs), + Modifier: tt.c.modifier, + }, + } + + if tt.c.depth != 0 { + sceneFilter.Groups.Depth = &tt.c.depth + } + + findFilter := &models.FindFilterType{} + if tt.q != "" { + findFilter.Q = &tt.q + } + + results, err := db.Scene.Query(ctx, models.SceneQueryOptions{ + SceneFilter: sceneFilter, + QueryOptions: models.QueryOptions{ + FindFilter: findFilter, + }, + }) + if err != nil { + t.Errorf("SceneStore.Query() error = %v", err) + return + } + + include := indexesToIDs(sceneIDs, tt.includeIdxs) + exclude := indexesToIDs(sceneIDs, tt.excludeIdxs) + + assert.Subset(results.IDs, include) + + for _, e := range exclude { + assert.NotContains(results.IDs, e) + } + }) + } +} + func TestSceneQueryMovies(t *testing.T) { withTxn(func(ctx context.Context) error { sqb := db.Scene movieCriterion := models.MultiCriterionInput{ Value: []string{ - strconv.Itoa(movieIDs[movieIdxWithScene]), + strconv.Itoa(groupIDs[groupIdxWithScene]), }, Modifier: models.CriterionModifierIncludes, } @@ -3884,16 +3987,16 @@ func TestSceneQueryMovies(t *testing.T) { assert.Len(t, scenes, 1) // ensure id is correct - assert.Equal(t, sceneIDs[sceneIdxWithMovie], scenes[0].ID) + assert.Equal(t, sceneIDs[sceneIdxWithGroup], scenes[0].ID) movieCriterion = models.MultiCriterionInput{ Value: []string{ - 
strconv.Itoa(movieIDs[movieIdxWithScene]), + strconv.Itoa(groupIDs[groupIdxWithScene]), }, Modifier: models.CriterionModifierExcludes, } - q := getSceneStringValue(sceneIdxWithMovie, titleField) + q := getSceneStringValue(sceneIdxWithGroup, titleField) findFilter := models.FindFilterType{ Q: &q, } @@ -4180,92 +4283,20 @@ func verifyScenesPerformerCount(t *testing.T, performerCountCriterion models.Int }) } -func TestSceneCountByTagID(t *testing.T) { - withTxn(func(ctx context.Context) error { - sqb := db.Scene - - sceneCount, err := sqb.CountByTagID(ctx, tagIDs[tagIdxWithScene]) - - if err != nil { - t.Errorf("error calling CountByTagID: %s", err.Error()) - } - - assert.Equal(t, 1, sceneCount) - - sceneCount, err = sqb.CountByTagID(ctx, 0) - - if err != nil { - t.Errorf("error calling CountByTagID: %s", err.Error()) - } - - assert.Equal(t, 0, sceneCount) - - return nil - }) -} - -func TestSceneCountByMovieID(t *testing.T) { - withTxn(func(ctx context.Context) error { - sqb := db.Scene - - sceneCount, err := sqb.CountByMovieID(ctx, movieIDs[movieIdxWithScene]) - - if err != nil { - t.Errorf("error calling CountByMovieID: %s", err.Error()) - } - - assert.Equal(t, 1, sceneCount) - - sceneCount, err = sqb.CountByMovieID(ctx, 0) - - if err != nil { - t.Errorf("error calling CountByMovieID: %s", err.Error()) - } - - assert.Equal(t, 0, sceneCount) - - return nil - }) -} - -func TestSceneCountByStudioID(t *testing.T) { - withTxn(func(ctx context.Context) error { - sqb := db.Scene - - sceneCount, err := sqb.CountByStudioID(ctx, studioIDs[studioIdxWithScene]) - - if err != nil { - t.Errorf("error calling CountByStudioID: %s", err.Error()) - } - - assert.Equal(t, 1, sceneCount) - - sceneCount, err = sqb.CountByStudioID(ctx, 0) - - if err != nil { - t.Errorf("error calling CountByStudioID: %s", err.Error()) - } - - assert.Equal(t, 0, sceneCount) - - return nil - }) -} - func TestFindByMovieID(t *testing.T) { withTxn(func(ctx context.Context) error { sqb := db.Scene - scenes, 
err := sqb.FindByMovieID(ctx, movieIDs[movieIdxWithScene]) + scenes, err := sqb.FindByGroupID(ctx, groupIDs[groupIdxWithScene]) if err != nil { t.Errorf("error calling FindByMovieID: %s", err.Error()) } assert.Len(t, scenes, 1) - assert.Equal(t, sceneIDs[sceneIdxWithMovie], scenes[0].ID) + assert.Equal(t, sceneIDs[sceneIdxWithGroup], scenes[0].ID) - scenes, err = sqb.FindByMovieID(ctx, 0) + scenes, err = sqb.FindByGroupID(ctx, 0) if err != nil { t.Errorf("error calling FindByMovieID: %s", err.Error()) diff --git a/pkg/sqlite/setup_test.go b/pkg/sqlite/setup_test.go index 91b2b49fb7d..624ffb4e222 100644 --- a/pkg/sqlite/setup_test.go +++ b/pkg/sqlite/setup_test.go @@ -54,7 +54,7 @@ const ( ) const ( - sceneIdxWithMovie = iota + sceneIdxWithGroup = iota sceneIdxWithGallery sceneIdxWithPerformer sceneIdx1WithPerformer @@ -78,6 +78,7 @@ const ( sceneIdxWithGrandChildStudio sceneIdxMissingPhash sceneIdxWithPerformerParentTag + sceneIdxWithGroupWithParent // new indexes above lastSceneIdx @@ -148,14 +149,23 @@ const ( ) const ( - movieIdxWithScene = iota - movieIdxWithStudio - // movies with dup names start from the end - // create 10 more basic movies (can remove this if we add more indexes) - movieIdxWithDupName = movieIdxWithStudio + 10 - - moviesNameCase = movieIdxWithDupName - moviesNameNoCase = 1 + groupIdxWithScene = iota + groupIdxWithStudio + groupIdxWithTag + groupIdxWithTwoTags + groupIdxWithThreeTags + groupIdxWithGrandChild + groupIdxWithChild + groupIdxWithParentAndChild + groupIdxWithParent + groupIdxWithGrandParent + groupIdxWithParentAndScene + groupIdxWithChildWithScene + // groups with dup names start from the end + groupIdxWithDupName + + groupsNameCase = groupIdxWithDupName + groupsNameNoCase = 1 ) const ( @@ -204,6 +214,9 @@ const ( tagIdxWithPerformer tagIdx1WithPerformer tagIdx2WithPerformer + tagIdxWithStudio + tagIdx1WithStudio + tagIdx2WithStudio tagIdxWithGallery tagIdx1WithGallery tagIdx2WithGallery @@ -214,6 +227,10 @@ const ( 
tagIdxWithParentAndChild tagIdxWithGrandParent tagIdx2WithMarkers + tagIdxWithGroup + tagIdx1WithGroup + tagIdx2WithGroup + tagIdx3WithGroup // new indexes above // tags with dup names start from the end tagIdx1WithDupName @@ -228,7 +245,7 @@ const ( const ( studioIdxWithScene = iota studioIdxWithTwoScenes - studioIdxWithMovie + studioIdxWithGroup studioIdxWithChildStudio studioIdxWithParentStudio studioIdxWithImage @@ -238,6 +255,10 @@ const ( studioIdxWithScenePerformer studioIdxWithImagePerformer studioIdxWithGalleryPerformer + studioIdxWithTag + studioIdx2WithTag + studioIdxWithTwoTags + studioIdxWithParentTag studioIdxWithGrandChild studioIdxWithParentAndChild studioIdxWithGrandParent @@ -264,9 +285,7 @@ const ( ) const ( - savedFilterIdxDefaultScene = iota - savedFilterIdxDefaultImage - savedFilterIdxScene + savedFilterIdxScene = iota savedFilterIdxImage // new indexes above @@ -293,7 +312,7 @@ var ( sceneIDs []int imageIDs []int performerIDs []int - movieIDs []int + groupIDs []int galleryIDs []int tagIDs []int studioIDs []int @@ -304,7 +323,7 @@ var ( tagNames []string studioNames []string - movieNames []string + groupNames []string performerNames []string ) @@ -377,8 +396,9 @@ var ( sceneIdxWithGallery: {galleryIdxWithScene}, } - sceneMovies = linkMap{ - sceneIdxWithMovie: {movieIdxWithScene}, + sceneGroups = linkMap{ + sceneIdxWithGroup: {groupIdxWithScene}, + sceneIdxWithGroupWithParent: {groupIdxWithParentAndScene}, } sceneStudios = map[int]int{ @@ -484,8 +504,14 @@ var ( ) var ( - movieStudioLinks = [][2]int{ - {movieIdxWithStudio, studioIdxWithMovie}, + groupStudioLinks = [][2]int{ + {groupIdxWithStudio, studioIdxWithGroup}, + } + + groupTags = linkMap{ + groupIdxWithTag: {tagIdxWithGroup}, + groupIdxWithTwoTags: {tagIdx1WithGroup, tagIdx2WithGroup}, + groupIdxWithThreeTags: {tagIdx1WithGroup, tagIdx2WithGroup, tagIdx3WithGroup}, } ) @@ -497,6 +523,15 @@ var ( } ) +var ( + studioTags = linkMap{ + studioIdxWithTag: {tagIdxWithStudio}, + 
studioIdx2WithTag: {tagIdx2WithStudio}, + studioIdxWithTwoTags: {tagIdx1WithStudio, tagIdx2WithStudio}, + studioIdxWithParentTag: {tagIdxWithParentAndChild}, + } +) + var ( performerTags = linkMap{ performerIdxWithTag: {tagIdxWithPerformer}, @@ -514,15 +549,31 @@ var ( } ) +var ( + groupParentLinks = [][2]int{ + {groupIdxWithChild, groupIdxWithParent}, + {groupIdxWithGrandChild, groupIdxWithParentAndChild}, + {groupIdxWithParentAndChild, groupIdxWithGrandParent}, + {groupIdxWithChildWithScene, groupIdxWithParentAndScene}, + } +) + func indexesToIDs(ids []int, indexes []int) []int { ret := make([]int, len(indexes)) for i, idx := range indexes { - ret[i] = ids[idx] + ret[i] = indexToID(ids, idx) } return ret } +func indexToID(ids []int, idx int) int { + if idx < 0 { + return invalidID + } + return ids[idx] +} + func indexFromID(ids []int, id int) int { for i, v := range ids { if v == id { @@ -622,14 +673,14 @@ func populateDB() error { // TODO - link folders to zip files - if err := createMovies(ctx, db.Movie, moviesNameCase, moviesNameNoCase); err != nil { - return fmt.Errorf("error creating movies: %s", err.Error()) - } - if err := createTags(ctx, db.Tag, tagsNameCase, tagsNameNoCase); err != nil { return fmt.Errorf("error creating tags: %s", err.Error()) } + if err := createGroups(ctx, db.Group, groupsNameCase, groupsNameNoCase); err != nil { + return fmt.Errorf("error creating groups: %s", err.Error()) + } + if err := createPerformers(ctx, performersNameCase, performersNameNoCase); err != nil { return fmt.Errorf("error creating performers: %s", err.Error()) } @@ -658,8 +709,8 @@ func populateDB() error { return fmt.Errorf("error creating saved filters: %s", err.Error()) } - if err := linkMovieStudios(ctx, db.Movie); err != nil { - return fmt.Errorf("error linking movie studios: %s", err.Error()) + if err := linkGroupStudios(ctx, db.Group); err != nil { + return fmt.Errorf("error linking group studios: %s", err.Error()) } if err := linkStudiosParent(ctx); err != 
nil { @@ -670,6 +721,10 @@ func populateDB() error { return fmt.Errorf("error linking tags parent: %s", err.Error()) } + if err := linkGroupsParent(ctx, db.Group); err != nil { + return fmt.Errorf("error linking tags parent: %s", err.Error()) + } + for _, ms := range markerSpecs { if err := createMarker(ctx, db.SceneMarker, ms); err != nil { return fmt.Errorf("error creating scene marker: %s", err.Error()) @@ -1042,12 +1097,12 @@ func makeScene(i int) *models.Scene { pids := indexesToIDs(performerIDs, scenePerformers[i]) tids := indexesToIDs(tagIDs, sceneTags[i]) - mids := indexesToIDs(movieIDs, sceneMovies[i]) + mids := indexesToIDs(groupIDs, sceneGroups[i]) - movies := make([]models.MoviesScenes, len(mids)) + groups := make([]models.GroupsScenes, len(mids)) for i, m := range mids { - movies[i] = models.MoviesScenes{ - MovieID: m, + groups[i] = models.GroupsScenes{ + GroupID: m, } } @@ -1065,7 +1120,7 @@ func makeScene(i int) *models.Scene { GalleryIDs: models.NewRelatedIDs(gids), PerformerIDs: models.NewRelatedIDs(pids), TagIDs: models.NewRelatedIDs(tids), - Movies: models.NewRelatedMovies(movies), + Groups: models.NewRelatedGroups(groups), StashIDs: models.NewRelatedStashIDs([]models.StashID{ sceneStashID(i), }), @@ -1293,18 +1348,27 @@ func createGalleries(ctx context.Context, n int) error { return nil } -func getMovieStringValue(index int, field string) string { - return getPrefixedStringValue("movie", index, field) +func getGroupStringValue(index int, field string) string { + return getPrefixedStringValue("group", index, field) } -func getMovieNullStringValue(index int, field string) string { - ret := getPrefixedNullStringValue("movie", index, field) +func getGroupNullStringValue(index int, field string) string { + ret := getPrefixedNullStringValue("group", index, field) return ret.String } -// createMoviees creates n movies with plain Name and o movies with camel cased NaMe included -func createMovies(ctx context.Context, mqb models.MovieReaderWriter, n int, 
o int) error { +func getGroupEmptyString(index int, field string) string { + v := getPrefixedNullStringValue("group", index, field) + if !v.Valid { + return "" + } + + return v.String +} + +// createGroups creates n groups with plain Name and o groups with camel cased NaMe included +func createGroups(ctx context.Context, mqb models.GroupReaderWriter, n int, o int) error { const namePlain = "Name" const nameNoCase = "NaMe" @@ -1312,26 +1376,31 @@ func createMovies(ctx context.Context, mqb models.MovieReaderWriter, n int, o in index := i name := namePlain + tids := indexesToIDs(tagIDs, groupTags[i]) + if i >= n { // i=n movies get dup names if case is not checked + name = nameNoCase // i>=n groups get dup names if case is not checked index = n + o - (i + 1) // for the name to be the same the number (index) must be the same also } // so count backwards to 0 as needed - // movies [ i ] and [ n + o - i - 1 ] should have similar names with only the Name!=NaMe part different + // groups [ i ] and [ n + o - i - 1 ] should have similar names with only the Name!=NaMe part different - name = getMovieStringValue(index, name) - movie := models.Movie{ + name = getGroupStringValue(index, name) + group := models.Group{ Name: name, - URL: getMovieNullStringValue(index, urlField), + URLs: models.NewRelatedStrings([]string{ + getGroupEmptyString(i, urlField), + }), + TagIDs: models.NewRelatedIDs(tids), } - err := mqb.Create(ctx, &movie) + err := mqb.Create(ctx, &group) if err != nil { - return fmt.Errorf("Error creating movie [%d] %v+: %s", i, movie, err.Error()) + return fmt.Errorf("Error creating group [%d] %v+: %s", i, group, err.Error()) } - movieIDs = append(movieIDs, movie.ID) - movieNames = append(movieNames, movie.Name) + groupIDs = append(groupIDs, group.ID) + groupNames = append(groupNames, group.Name) } return nil @@ -1347,6 +1416,15 @@ func getPerformerNullStringValue(index int, field string) string { return ret.String } +func getPerformerEmptyString(index int, field 
string) string { + v := getPrefixedNullStringValue("performer", index, field) + if !v.Valid { + return "" + } + + return v.String +} + func getPerformerBoolValue(index int) bool { index = index % 2 return index == 1 @@ -1452,17 +1530,19 @@ func createPerformers(ctx context.Context, n int, o int) error { Name: getPerformerStringValue(index, name), Disambiguation: getPerformerStringValue(index, "disambiguation"), Aliases: models.NewRelatedStrings(performerAliases(index)), - URL: getPerformerNullStringValue(i, urlField), - Favorite: getPerformerBoolValue(i), - Birthdate: getPerformerBirthdate(i), - DeathDate: getPerformerDeathDate(i), - Details: getPerformerStringValue(i, "Details"), - Ethnicity: getPerformerStringValue(i, "Ethnicity"), - PenisLength: getPerformerPenisLength(i), - Circumcised: getPerformerCircumcised(i), - Rating: getIntPtr(getRating(i)), - IgnoreAutoTag: getIgnoreAutoTag(i), - TagIDs: models.NewRelatedIDs(tids), + URLs: models.NewRelatedStrings([]string{ + getPerformerEmptyString(i, urlField), + }), + Favorite: getPerformerBoolValue(i), + Birthdate: getPerformerBirthdate(i), + DeathDate: getPerformerDeathDate(i), + Details: getPerformerStringValue(i, "Details"), + Ethnicity: getPerformerStringValue(i, "Ethnicity"), + PenisLength: getPerformerPenisLength(i), + Circumcised: getPerformerCircumcised(i), + Rating: getIntPtr(getRating(i)), + IgnoreAutoTag: getIgnoreAutoTag(i), + TagIDs: models.NewRelatedIDs(tids), } careerLength := getPerformerCareerLength(i) @@ -1528,6 +1608,11 @@ func getTagPerformerCount(id int) int { return len(performerTags.reverseLookup(idx)) } +func getTagStudioCount(id int) int { + idx := indexFromID(tagIDs, id) + return len(studioTags.reverseLookup(idx)) +} + func getTagParentCount(id int) int { if id == tagIDs[tagIdxWithParentTag] || id == tagIDs[tagIdxWithGrandParent] || id == tagIDs[tagIdxWithParentAndChild] { return 1 @@ -1643,14 +1728,16 @@ func createStudios(ctx context.Context, n int, o int) error { // studios [ i ] and [ n 
+ o - i - 1 ] should have similar names with only the Name!=NaMe part different name = getStudioStringValue(index, name) + tids := indexesToIDs(tagIDs, studioTags[i]) studio := models.Studio{ Name: name, URL: getStudioStringValue(index, urlField), Favorite: getStudioBoolValue(index), IgnoreAutoTag: getIgnoreAutoTag(i), + TagIDs: models.NewRelatedIDs(tids), } // only add aliases for some scenes - if i == studioIdxWithMovie || i%5 == 0 { + if i == studioIdxWithGroup || i%5 == 0 { alias := getStudioStringValue(i, "Alias") studio.Aliases = models.NewRelatedStrings([]string{alias}) } @@ -1716,9 +1803,9 @@ func createChapter(ctx context.Context, mqb models.GalleryChapterReaderWriter, c func getSavedFilterMode(index int) models.FilterMode { switch index { - case savedFilterIdxScene, savedFilterIdxDefaultScene: + case savedFilterIdxScene: return models.FilterModeScenes - case savedFilterIdxImage, savedFilterIdxDefaultImage: + case savedFilterIdxImage: return models.FilterModeImages default: return models.FilterModeScenes @@ -1726,11 +1813,6 @@ func getSavedFilterMode(index int) models.FilterMode { } func getSavedFilterName(index int) string { - if index <= savedFilterIdxDefaultImage { - // empty string for default filters - return "" - } - if index <= savedFilterIdxImage { // use the same name for the first two - should be possible return firstSavedFilterName @@ -1788,12 +1870,12 @@ func doLinks(links [][2]int, fn func(idx1, idx2 int) error) error { return nil } -func linkMovieStudios(ctx context.Context, mqb models.MovieWriter) error { - return doLinks(movieStudioLinks, func(movieIndex, studioIndex int) error { - movie := models.MoviePartial{ +func linkGroupStudios(ctx context.Context, mqb models.GroupWriter) error { + return doLinks(groupStudioLinks, func(groupIndex, studioIndex int) error { + group := models.GroupPartial{ StudioID: models.NewOptionalInt(studioIDs[studioIndex]), } - _, err := mqb.UpdatePartial(ctx, movieIDs[movieIndex], movie) + _, err := 
mqb.UpdatePartial(ctx, groupIDs[groupIndex], group) return err }) @@ -1831,6 +1913,24 @@ func linkTagsParent(ctx context.Context, qb models.TagReaderWriter) error { }) } +func linkGroupsParent(ctx context.Context, qb models.GroupReaderWriter) error { + return doLinks(groupParentLinks, func(parentIndex, childIndex int) error { + groupID := groupIDs[childIndex] + + p := models.GroupPartial{ + ContainingGroups: &models.UpdateGroupDescriptions{ + Groups: []models.GroupIDDescription{ + {GroupID: groupIDs[parentIndex]}, + }, + Mode: models.RelationshipUpdateModeAdd, + }, + } + + _, err := qb.UpdatePartial(ctx, groupID, p) + return err + }) +} + func addTagImage(ctx context.Context, qb models.TagWriter, tagIndex int) error { return qb.UpdateImage(ctx, tagIDs[tagIndex], []byte("image")) } diff --git a/pkg/sqlite/sql.go b/pkg/sqlite/sql.go index 2c5e7d39686..780d2e9881b 100644 --- a/pkg/sqlite/sql.go +++ b/pkg/sqlite/sql.go @@ -21,6 +21,11 @@ func distinctIDs(qb *queryBuilder, tableName string) { qb.from = tableName } +func selectIDs(qb *queryBuilder, tableName string) { + qb.addColumn(getColumn(tableName, "id")) + qb.from = tableName +} + func getColumn(tableName string, columnName string) string { return tableName + "." 
+ columnName } diff --git a/pkg/sqlite/studio.go b/pkg/sqlite/studio.go index e6ab0315786..95edf4173e2 100644 --- a/pkg/sqlite/studio.go +++ b/pkg/sqlite/studio.go @@ -25,6 +25,7 @@ const ( studioParentIDColumn = "parent_id" studioNameColumn = "name" studioImageBlobColumn = "image_blob" + studiosTagsTable = "studios_tags" ) type studioRow struct { @@ -90,23 +91,71 @@ func (r *studioRowRecord) fromPartial(o models.StudioPartial) { r.setBool("ignore_auto_tag", o.IgnoreAutoTag) } -type StudioStore struct { +type studioRepositoryType struct { repository - blobJoinQueryBuilder - tableMgr *table + stashIDs stashIDRepository + tags joinRepository + + scenes repository + images repository + galleries repository } -func NewStudioStore(blobStore *BlobStore) *StudioStore { - return &StudioStore{ +var ( + studioRepository = studioRepositoryType{ repository: repository{ tableName: studioTable, idColumn: idColumn, }, + stashIDs: stashIDRepository{ + repository{ + tableName: "studio_stash_ids", + idColumn: studioIDColumn, + }, + }, + scenes: repository{ + tableName: sceneTable, + idColumn: studioIDColumn, + }, + images: repository{ + tableName: imageTable, + idColumn: studioIDColumn, + }, + galleries: repository{ + tableName: galleryTable, + idColumn: studioIDColumn, + }, + tags: joinRepository{ + repository: repository{ + tableName: studiosTagsTable, + idColumn: studioIDColumn, + }, + fkColumn: tagIDColumn, + foreignTable: tagTable, + orderBy: "tags.name ASC", + }, + } +) + +type StudioStore struct { + blobJoinQueryBuilder + tagRelationshipStore + + tableMgr *table +} + +func NewStudioStore(blobStore *BlobStore) *StudioStore { + return &StudioStore{ blobJoinQueryBuilder: blobJoinQueryBuilder{ blobStore: blobStore, joinTable: studioTable, }, + tagRelationshipStore: tagRelationshipStore{ + idRelationshipStore: idRelationshipStore{ + joinTable: studiosTagsTableMgr, + }, + }, tableMgr: studioTableMgr, } @@ -141,13 +190,17 @@ func (qb *StudioStore) Create(ctx context.Context, 
newObject *models.Studio) err } } + if err := qb.tagRelationshipStore.createRelationships(ctx, id, newObject.TagIDs); err != nil { + return err + } + if newObject.StashIDs.Loaded() { if err := studiosStashIDsTableMgr.insertJoins(ctx, id, newObject.StashIDs.List()); err != nil { return err } } - updated, _ := qb.find(ctx, id) + updated, err := qb.find(ctx, id) if err != nil { return fmt.Errorf("finding after create: %w", err) } @@ -181,6 +234,10 @@ func (qb *StudioStore) UpdatePartial(ctx context.Context, input models.StudioPar } } + if err := qb.tagRelationshipStore.modifyRelationships(ctx, input.ID, input.TagIDs); err != nil { + return nil, err + } + if input.StashIDs != nil { if err := studiosStashIDsTableMgr.modifyJoins(ctx, input.ID, input.StashIDs.StashIDs, input.StashIDs.Mode); err != nil { return nil, err @@ -205,6 +262,10 @@ func (qb *StudioStore) Update(ctx context.Context, updatedObject *models.Studio) } } + if err := qb.tagRelationshipStore.replaceRelationships(ctx, updatedObject.ID, updatedObject.TagIDs); err != nil { + return err + } + if updatedObject.StashIDs.Loaded() { if err := studiosStashIDsTableMgr.replaceJoins(ctx, updatedObject.ID, updatedObject.StashIDs.List()); err != nil { return err @@ -220,7 +281,7 @@ func (qb *StudioStore) Destroy(ctx context.Context, id int) error { return err } - return qb.destroyExisting(ctx, []int{id}) + return studioRepository.destroyExisting(ctx, []int{id}) } // returns nil, nil if not found @@ -452,83 +513,6 @@ func (qb *StudioStore) QueryForAutoTag(ctx context.Context, words []string) ([]* return ret, nil } -func (qb *StudioStore) validateFilter(filter *models.StudioFilterType) error { - const and = "AND" - const or = "OR" - const not = "NOT" - - if filter.And != nil { - if filter.Or != nil { - return illegalFilterCombination(and, or) - } - if filter.Not != nil { - return illegalFilterCombination(and, not) - } - - return qb.validateFilter(filter.And) - } - - if filter.Or != nil { - if filter.Not != nil { - return 
illegalFilterCombination(or, not) - } - - return qb.validateFilter(filter.Or) - } - - if filter.Not != nil { - return qb.validateFilter(filter.Not) - } - - return nil -} - -func (qb *StudioStore) makeFilter(ctx context.Context, studioFilter *models.StudioFilterType) *filterBuilder { - query := &filterBuilder{} - - if studioFilter.And != nil { - query.and(qb.makeFilter(ctx, studioFilter.And)) - } - if studioFilter.Or != nil { - query.or(qb.makeFilter(ctx, studioFilter.Or)) - } - if studioFilter.Not != nil { - query.not(qb.makeFilter(ctx, studioFilter.Not)) - } - - query.handleCriterion(ctx, stringCriterionHandler(studioFilter.Name, studioTable+".name")) - query.handleCriterion(ctx, stringCriterionHandler(studioFilter.Details, studioTable+".details")) - query.handleCriterion(ctx, stringCriterionHandler(studioFilter.URL, studioTable+".url")) - query.handleCriterion(ctx, intCriterionHandler(studioFilter.Rating100, studioTable+".rating", nil)) - query.handleCriterion(ctx, boolCriterionHandler(studioFilter.Favorite, studioTable+".favorite", nil)) - query.handleCriterion(ctx, boolCriterionHandler(studioFilter.IgnoreAutoTag, studioTable+".ignore_auto_tag", nil)) - - query.handleCriterion(ctx, criterionHandlerFunc(func(ctx context.Context, f *filterBuilder) { - if studioFilter.StashID != nil { - qb.stashIDRepository().join(f, "studio_stash_ids", "studios.id") - stringCriterionHandler(studioFilter.StashID, "studio_stash_ids.stash_id")(ctx, f) - } - })) - query.handleCriterion(ctx, &stashIDCriterionHandler{ - c: studioFilter.StashIDEndpoint, - stashIDRepository: qb.stashIDRepository(), - stashIDTableAs: "studio_stash_ids", - parentIDCol: "studios.id", - }) - - query.handleCriterion(ctx, studioIsMissingCriterionHandler(qb, studioFilter.IsMissing)) - query.handleCriterion(ctx, studioSceneCountCriterionHandler(qb, studioFilter.SceneCount)) - query.handleCriterion(ctx, studioImageCountCriterionHandler(qb, studioFilter.ImageCount)) - query.handleCriterion(ctx, 
studioGalleryCountCriterionHandler(qb, studioFilter.GalleryCount)) - query.handleCriterion(ctx, studioParentCriterionHandler(qb, studioFilter.Parents)) - query.handleCriterion(ctx, studioAliasCriterionHandler(qb, studioFilter.Aliases)) - query.handleCriterion(ctx, studioChildCountCriterionHandler(qb, studioFilter.ChildCount)) - query.handleCriterion(ctx, timestampCriterionHandler(studioFilter.CreatedAt, studioTable+".created_at")) - query.handleCriterion(ctx, timestampCriterionHandler(studioFilter.UpdatedAt, studioTable+".updated_at")) - - return query -} - func (qb *StudioStore) makeQuery(ctx context.Context, studioFilter *models.StudioFilterType, findFilter *models.FindFilterType) (*queryBuilder, error) { if studioFilter == nil { studioFilter = &models.StudioFilterType{} @@ -537,7 +521,7 @@ func (qb *StudioStore) makeQuery(ctx context.Context, studioFilter *models.Studi findFilter = &models.FindFilterType{} } - query := qb.newQuery() + query := studioRepository.newQuery() distinctIDs(&query, studioTable) if q := findFilter.Q; q != nil && *q != "" { @@ -546,10 +530,9 @@ func (qb *StudioStore) makeQuery(ctx context.Context, studioFilter *models.Studi query.parseQueryString(searchColumns, *q) } - if err := qb.validateFilter(studioFilter); err != nil { - return nil, err - } - filter := qb.makeFilter(ctx, studioFilter) + filter := filterBuilderFromHandler(ctx, &studioFilterHandler{ + studioFilter: studioFilter, + }) if err := query.addFilter(filter); err != nil { return nil, err @@ -584,91 +567,13 @@ func (qb *StudioStore) Query(ctx context.Context, studioFilter *models.StudioFil return studios, countResult, nil } -func studioIsMissingCriterionHandler(qb *StudioStore, isMissing *string) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if isMissing != nil && *isMissing != "" { - switch *isMissing { - case "image": - f.addWhere("studios.image_blob IS NULL") - case "stash_id": - qb.stashIDRepository().join(f, "studio_stash_ids", 
"studios.id") - f.addWhere("studio_stash_ids.studio_id IS NULL") - default: - f.addWhere("(studios." + *isMissing + " IS NULL OR TRIM(studios." + *isMissing + ") = '')") - } - } - } -} - -func studioSceneCountCriterionHandler(qb *StudioStore, sceneCount *models.IntCriterionInput) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if sceneCount != nil { - f.addLeftJoin("scenes", "", "scenes.studio_id = studios.id") - clause, args := getIntCriterionWhereClause("count(distinct scenes.id)", *sceneCount) - - f.addHaving(clause, args...) - } - } -} - -func studioImageCountCriterionHandler(qb *StudioStore, imageCount *models.IntCriterionInput) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if imageCount != nil { - f.addLeftJoin("images", "", "images.studio_id = studios.id") - clause, args := getIntCriterionWhereClause("count(distinct images.id)", *imageCount) - - f.addHaving(clause, args...) - } - } -} - -func studioGalleryCountCriterionHandler(qb *StudioStore, galleryCount *models.IntCriterionInput) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if galleryCount != nil { - f.addLeftJoin("galleries", "", "galleries.studio_id = studios.id") - clause, args := getIntCriterionWhereClause("count(distinct galleries.id)", *galleryCount) - - f.addHaving(clause, args...) 
- } - } -} - -func studioParentCriterionHandler(qb *StudioStore, parents *models.MultiCriterionInput) criterionHandlerFunc { - addJoinsFunc := func(f *filterBuilder) { - f.addLeftJoin("studios", "parent_studio", "parent_studio.id = studios.parent_id") - } - h := multiCriterionHandlerBuilder{ - primaryTable: studioTable, - foreignTable: "parent_studio", - joinTable: "", - primaryFK: studioIDColumn, - foreignFK: "parent_id", - addJoinsFunc: addJoinsFunc, - } - return h.handler(parents) -} - -func studioAliasCriterionHandler(qb *StudioStore, alias *models.StringCriterionInput) criterionHandlerFunc { - h := stringListCriterionHandlerBuilder{ - joinTable: studioAliasesTable, - stringColumn: studioAliasColumn, - addJoinTable: func(f *filterBuilder) { - studiosAliasesTableMgr.join(f, "", "studios.id") - }, +func (qb *StudioStore) QueryCount(ctx context.Context, studioFilter *models.StudioFilterType, findFilter *models.FindFilterType) (int, error) { + query, err := qb.makeQuery(ctx, studioFilter, findFilter) + if err != nil { + return 0, err } - return h.handler(alias) -} - -func studioChildCountCriterionHandler(qb *StudioStore, childCount *models.IntCriterionInput) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if childCount != nil { - f.addLeftJoin("studios", "children_count", "children_count.parent_id = studios.id") - clause, args := getIntCriterionWhereClause("count(distinct children_count.id)", *childCount) - - f.addHaving(clause, args...) 
- } - } + return query.executeCount(ctx) } var studioSortOptions = sortOptions{ @@ -702,6 +607,8 @@ func (qb *StudioStore) getStudioSort(findFilter *models.FindFilterType) (string, sortQuery := "" switch sort { + case "tag_count": + sortQuery += getCountSort(studioTable, studiosTagsTable, studioIDColumn, direction) case "scenes_count": sortQuery += getCountSort(studioTable, sceneTable, studioIDColumn, direction) case "images_count": @@ -735,16 +642,6 @@ func (qb *StudioStore) destroyImage(ctx context.Context, studioID int) error { return qb.blobJoinQueryBuilder.DestroyImage(ctx, studioID, studioImageBlobColumn) } -func (qb *StudioStore) stashIDRepository() *stashIDRepository { - return &stashIDRepository{ - repository{ - tx: qb.tx, - tableName: "studio_stash_ids", - idColumn: studioIDColumn, - }, - } -} - func (qb *StudioStore) GetStashIDs(ctx context.Context, studioID int) ([]models.StashID, error) { return studiosStashIDsTableMgr.get(ctx, studioID) } diff --git a/pkg/sqlite/studio_filter.go b/pkg/sqlite/studio_filter.go new file mode 100644 index 00000000000..c514364c4ff --- /dev/null +++ b/pkg/sqlite/studio_filter.go @@ -0,0 +1,229 @@ +package sqlite + +import ( + "context" + + "github.com/stashapp/stash/pkg/models" +) + +type studioFilterHandler struct { + studioFilter *models.StudioFilterType +} + +func (qb *studioFilterHandler) validate() error { + studioFilter := qb.studioFilter + if studioFilter == nil { + return nil + } + + if err := validateFilterCombination(studioFilter.OperatorFilter); err != nil { + return err + } + + if subFilter := studioFilter.SubFilter(); subFilter != nil { + sqb := &studioFilterHandler{studioFilter: subFilter} + if err := sqb.validate(); err != nil { + return err + } + } + + return nil +} + +func (qb *studioFilterHandler) handle(ctx context.Context, f *filterBuilder) { + studioFilter := qb.studioFilter + if studioFilter == nil { + return + } + + if err := qb.validate(); err != nil { + f.setError(err) + return + } + + sf := 
studioFilter.SubFilter() + if sf != nil { + sub := &studioFilterHandler{sf} + handleSubFilter(ctx, sub, f, studioFilter.OperatorFilter) + } + + f.handleCriterion(ctx, qb.criterionHandler()) +} + +func (qb *studioFilterHandler) criterionHandler() criterionHandler { + studioFilter := qb.studioFilter + return compoundHandler{ + stringCriterionHandler(studioFilter.Name, studioTable+".name"), + stringCriterionHandler(studioFilter.Details, studioTable+".details"), + stringCriterionHandler(studioFilter.URL, studioTable+".url"), + intCriterionHandler(studioFilter.Rating100, studioTable+".rating", nil), + boolCriterionHandler(studioFilter.Favorite, studioTable+".favorite", nil), + boolCriterionHandler(studioFilter.IgnoreAutoTag, studioTable+".ignore_auto_tag", nil), + + criterionHandlerFunc(func(ctx context.Context, f *filterBuilder) { + if studioFilter.StashID != nil { + studioRepository.stashIDs.join(f, "studio_stash_ids", "studios.id") + stringCriterionHandler(studioFilter.StashID, "studio_stash_ids.stash_id")(ctx, f) + } + }), + &stashIDCriterionHandler{ + c: studioFilter.StashIDEndpoint, + stashIDRepository: &studioRepository.stashIDs, + stashIDTableAs: "studio_stash_ids", + parentIDCol: "studios.id", + }, + + qb.isMissingCriterionHandler(studioFilter.IsMissing), + qb.tagCountCriterionHandler(studioFilter.TagCount), + qb.sceneCountCriterionHandler(studioFilter.SceneCount), + qb.imageCountCriterionHandler(studioFilter.ImageCount), + qb.galleryCountCriterionHandler(studioFilter.GalleryCount), + qb.parentCriterionHandler(studioFilter.Parents), + qb.aliasCriterionHandler(studioFilter.Aliases), + qb.tagsCriterionHandler(studioFilter.Tags), + qb.childCountCriterionHandler(studioFilter.ChildCount), + ×tampCriterionHandler{studioFilter.CreatedAt, studioTable + ".created_at", nil}, + ×tampCriterionHandler{studioFilter.UpdatedAt, studioTable + ".updated_at", nil}, + + &relatedFilterHandler{ + relatedIDCol: "scenes.id", + relatedRepo: sceneRepository.repository, + relatedHandler: 
&sceneFilterHandler{studioFilter.ScenesFilter}, + joinFn: func(f *filterBuilder) { + studioRepository.scenes.innerJoin(f, "", "studios.id") + }, + }, + + &relatedFilterHandler{ + relatedIDCol: "images.id", + relatedRepo: imageRepository.repository, + relatedHandler: &imageFilterHandler{studioFilter.ImagesFilter}, + joinFn: func(f *filterBuilder) { + studioRepository.images.innerJoin(f, "", "studios.id") + }, + }, + + &relatedFilterHandler{ + relatedIDCol: "galleries.id", + relatedRepo: galleryRepository.repository, + relatedHandler: &galleryFilterHandler{studioFilter.GalleriesFilter}, + joinFn: func(f *filterBuilder) { + studioRepository.galleries.innerJoin(f, "", "studios.id") + }, + }, + } +} + +func (qb *studioFilterHandler) isMissingCriterionHandler(isMissing *string) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if isMissing != nil && *isMissing != "" { + switch *isMissing { + case "image": + f.addWhere("studios.image_blob IS NULL") + case "stash_id": + studioRepository.stashIDs.join(f, "studio_stash_ids", "studios.id") + f.addWhere("studio_stash_ids.studio_id IS NULL") + default: + f.addWhere("(studios." + *isMissing + " IS NULL OR TRIM(studios." + *isMissing + ") = '')") + } + } + } +} + +func (qb *studioFilterHandler) sceneCountCriterionHandler(sceneCount *models.IntCriterionInput) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if sceneCount != nil { + f.addLeftJoin("scenes", "", "scenes.studio_id = studios.id") + clause, args := getIntCriterionWhereClause("count(distinct scenes.id)", *sceneCount) + + f.addHaving(clause, args...) 
+ } + } +} + +func (qb *studioFilterHandler) imageCountCriterionHandler(imageCount *models.IntCriterionInput) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if imageCount != nil { + f.addLeftJoin("images", "", "images.studio_id = studios.id") + clause, args := getIntCriterionWhereClause("count(distinct images.id)", *imageCount) + + f.addHaving(clause, args...) + } + } +} + +func (qb *studioFilterHandler) galleryCountCriterionHandler(galleryCount *models.IntCriterionInput) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if galleryCount != nil { + f.addLeftJoin("galleries", "", "galleries.studio_id = studios.id") + clause, args := getIntCriterionWhereClause("count(distinct galleries.id)", *galleryCount) + + f.addHaving(clause, args...) + } + } +} + +func (qb *studioFilterHandler) tagCountCriterionHandler(tagCount *models.IntCriterionInput) criterionHandlerFunc { + h := countCriterionHandlerBuilder{ + primaryTable: studioTable, + joinTable: studiosTagsTable, + primaryFK: studioIDColumn, + } + + return h.handler(tagCount) +} + +func (qb *studioFilterHandler) parentCriterionHandler(parents *models.MultiCriterionInput) criterionHandlerFunc { + addJoinsFunc := func(f *filterBuilder) { + f.addLeftJoin("studios", "parent_studio", "parent_studio.id = studios.parent_id") + } + h := multiCriterionHandlerBuilder{ + primaryTable: studioTable, + foreignTable: "parent_studio", + joinTable: "", + primaryFK: studioIDColumn, + foreignFK: "parent_id", + addJoinsFunc: addJoinsFunc, + } + return h.handler(parents) +} + +func (qb *studioFilterHandler) aliasCriterionHandler(alias *models.StringCriterionInput) criterionHandlerFunc { + h := stringListCriterionHandlerBuilder{ + primaryTable: studioTable, + primaryFK: studioIDColumn, + joinTable: studioAliasesTable, + stringColumn: studioAliasColumn, + addJoinTable: func(f *filterBuilder) { + studiosAliasesTableMgr.join(f, "", "studios.id") + }, + } + + return h.handler(alias) 
+} + +func (qb *studioFilterHandler) childCountCriterionHandler(childCount *models.IntCriterionInput) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if childCount != nil { + f.addLeftJoin("studios", "children_count", "children_count.parent_id = studios.id") + clause, args := getIntCriterionWhereClause("count(distinct children_count.id)", *childCount) + + f.addHaving(clause, args...) + } + } +} + +func (qb *studioFilterHandler) tagsCriterionHandler(tags *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { + h := joinedHierarchicalMultiCriterionHandlerBuilder{ + primaryTable: studioTable, + foreignTable: tagTable, + foreignFK: "tag_id", + + relationsTable: "tags_relations", + joinTable: studiosTagsTable, + joinAs: "studio_tag", + primaryFK: studioIDColumn, + } + + return h.handler(tags) +} diff --git a/pkg/sqlite/studio_test.go b/pkg/sqlite/studio_test.go index 25f8ea195d3..a61dadc245f 100644 --- a/pkg/sqlite/studio_test.go +++ b/pkg/sqlite/studio_test.go @@ -59,10 +59,12 @@ func TestStudioQueryNameOr(t *testing.T) { Value: studio1Name, Modifier: models.CriterionModifierEquals, }, - Or: &models.StudioFilterType{ - Name: &models.StringCriterionInput{ - Value: studio2Name, - Modifier: models.CriterionModifierEquals, + OperatorFilter: models.OperatorFilter[models.StudioFilterType]{ + Or: &models.StudioFilterType{ + Name: &models.StringCriterionInput{ + Value: studio2Name, + Modifier: models.CriterionModifierEquals, + }, }, }, } @@ -90,10 +92,12 @@ func TestStudioQueryNameAndUrl(t *testing.T) { Value: studioName, Modifier: models.CriterionModifierEquals, }, - And: &models.StudioFilterType{ - URL: &models.StringCriterionInput{ - Value: studioUrl, - Modifier: models.CriterionModifierEquals, + OperatorFilter: models.OperatorFilter[models.StudioFilterType]{ + And: &models.StudioFilterType{ + URL: &models.StringCriterionInput{ + Value: studioUrl, + Modifier: models.CriterionModifierEquals, + }, }, }, } @@ -128,8 +132,10 @@ func 
TestStudioQueryNameNotUrl(t *testing.T) { studioFilter := models.StudioFilterType{ Name: &nameCriterion, - Not: &models.StudioFilterType{ - URL: &urlCriterion, + OperatorFilter: models.OperatorFilter[models.StudioFilterType]{ + Not: &models.StudioFilterType{ + URL: &urlCriterion, + }, }, } @@ -160,8 +166,10 @@ func TestStudioIllegalQuery(t *testing.T) { } studioFilter := &models.StudioFilterType{ - And: &subFilter, - Or: &subFilter, + OperatorFilter: models.OperatorFilter[models.StudioFilterType]{ + And: &subFilter, + Or: &subFilter, + }, } withTxn(func(ctx context.Context) error { @@ -208,7 +216,7 @@ func TestStudioQueryForAutoTag(t *testing.T) { withTxn(func(ctx context.Context) error { tqb := db.Studio - name := studioNames[studioIdxWithMovie] // find a studio by name + name := studioNames[studioIdxWithGroup] // find a studio by name studios, err := tqb.QueryForAutoTag(ctx, []string{name}) @@ -217,16 +225,16 @@ func TestStudioQueryForAutoTag(t *testing.T) { } assert.Len(t, studios, 1) - assert.Equal(t, strings.ToLower(studioNames[studioIdxWithMovie]), strings.ToLower(studios[0].Name)) + assert.Equal(t, strings.ToLower(studioNames[studioIdxWithGroup]), strings.ToLower(studios[0].Name)) - name = getStudioStringValue(studioIdxWithMovie, "Alias") + name = getStudioStringValue(studioIdxWithGroup, "Alias") studios, err = tqb.QueryForAutoTag(ctx, []string{name}) if err != nil { t.Errorf("Error finding studios: %s", err.Error()) } if assert.Len(t, studios, 1) { - assert.Equal(t, studioIDs[studioIdxWithMovie], studios[0].ID) + assert.Equal(t, studioIDs[studioIdxWithGroup], studios[0].ID) } return nil }) @@ -696,6 +704,110 @@ func TestStudioQueryRating(t *testing.T) { verifyStudiosRating(t, ratingCriterion) } +func queryStudios(ctx context.Context, t *testing.T, studioFilter *models.StudioFilterType, findFilter *models.FindFilterType) []*models.Studio { + t.Helper() + studios, _, err := db.Studio.Query(ctx, studioFilter, findFilter) + if err != nil { + t.Errorf("Error 
querying studio: %s", err.Error()) + } + + return studios +} + +func TestStudioQueryTags(t *testing.T) { + withTxn(func(ctx context.Context) error { + tagCriterion := models.HierarchicalMultiCriterionInput{ + Value: []string{ + strconv.Itoa(tagIDs[tagIdxWithStudio]), + strconv.Itoa(tagIDs[tagIdx1WithStudio]), + }, + Modifier: models.CriterionModifierIncludes, + } + + studioFilter := models.StudioFilterType{ + Tags: &tagCriterion, + } + + // ensure ids are correct + studios := queryStudios(ctx, t, &studioFilter, nil) + assert.Len(t, studios, 2) + for _, studio := range studios { + assert.True(t, studio.ID == studioIDs[studioIdxWithTag] || studio.ID == studioIDs[studioIdxWithTwoTags]) + } + + tagCriterion = models.HierarchicalMultiCriterionInput{ + Value: []string{ + strconv.Itoa(tagIDs[tagIdx1WithStudio]), + strconv.Itoa(tagIDs[tagIdx2WithStudio]), + }, + Modifier: models.CriterionModifierIncludesAll, + } + + studios = queryStudios(ctx, t, &studioFilter, nil) + + assert.Len(t, studios, 1) + assert.Equal(t, sceneIDs[studioIdxWithTwoTags], studios[0].ID) + + tagCriterion = models.HierarchicalMultiCriterionInput{ + Value: []string{ + strconv.Itoa(tagIDs[tagIdx1WithStudio]), + }, + Modifier: models.CriterionModifierExcludes, + } + + q := getSceneStringValue(studioIdxWithTwoTags, titleField) + findFilter := models.FindFilterType{ + Q: &q, + } + + studios = queryStudios(ctx, t, &studioFilter, &findFilter) + assert.Len(t, studios, 0) + + return nil + }) +} + +func TestStudioQueryTagCount(t *testing.T) { + const tagCount = 1 + tagCountCriterion := models.IntCriterionInput{ + Value: tagCount, + Modifier: models.CriterionModifierEquals, + } + + verifyStudiosTagCount(t, tagCountCriterion) + + tagCountCriterion.Modifier = models.CriterionModifierNotEquals + verifyStudiosTagCount(t, tagCountCriterion) + + tagCountCriterion.Modifier = models.CriterionModifierGreaterThan + verifyStudiosTagCount(t, tagCountCriterion) + + tagCountCriterion.Modifier = models.CriterionModifierLessThan 
+ verifyStudiosTagCount(t, tagCountCriterion) +} + +func verifyStudiosTagCount(t *testing.T, tagCountCriterion models.IntCriterionInput) { + withTxn(func(ctx context.Context) error { + sqb := db.Studio + studioFilter := models.StudioFilterType{ + TagCount: &tagCountCriterion, + } + + studios := queryStudios(ctx, t, &studioFilter, nil) + assert.Greater(t, len(studios), 0) + + for _, studio := range studios { + ids, err := sqb.GetTagIDs(ctx, studio.ID) + if err != nil { + return err + } + verifyInt(t, len(ids), tagCountCriterion) + } + + return nil + }) +} + func verifyStudioQuery(t *testing.T, filter models.StudioFilterType, verifyFn func(ctx context.Context, s *models.Studio)) { withTxn(func(ctx context.Context) error { t.Helper() @@ -799,7 +911,7 @@ func TestStudioQueryName(t *testing.T) { } func TestStudioQueryAlias(t *testing.T) { - const studioIdx = studioIdxWithMovie + const studioIdx = studioIdxWithGroup studioName := getStudioStringValue(studioIdx, "Alias") aliasCriterion := &models.StringCriterionInput{ diff --git a/pkg/sqlite/table.go b/pkg/sqlite/table.go index a04504281b0..2ae3bf9458c 100644 --- a/pkg/sqlite/table.go +++ b/pkg/sqlite/table.go @@ -155,6 +155,10 @@ func (t *table) join(j joiner, as string, parentIDCol string) { type joinTable struct { table fkColumn exp.IdentifierExpression + + // required for ordering + foreignTable *table + orderBy exp.OrderedExpression } func (t *joinTable) invert() *joinTable { @@ -170,6 +174,13 @@ func (t *joinTable) invert() *joinTable { func (t *joinTable) get(ctx context.Context, id int) ([]int, error) { q := dialect.Select(t.fkColumn).From(t.table.table).Where(t.idColumn.Eq(id)) + if t.orderBy != nil { + if t.foreignTable != nil { + q = q.InnerJoin(t.foreignTable.table, goqu.On(t.foreignTable.idColumn.Eq(t.fkColumn))) + } + q = q.Order(t.orderBy) + } + const single = false var ret []int if err := queryFunc(ctx, q, single, func(rows *sqlx.Rows) error { @@ -193,8 +204,7 @@ func (t *joinTable) insertJoins(ctx 
context.Context, id int, foreignIDs []int) e // ignore duplicates q := fmt.Sprintf("INSERT INTO %s (%s, %s) VALUES (?, ?) ON CONFLICT (%[2]s, %s) DO NOTHING", t.table.table.GetTable(), t.idColumn.GetCol(), t.fkColumn.GetCol()) - tx := dbWrapper{} - stmt, err := tx.Prepare(ctx, q) + stmt, err := dbWrapper.Prepare(ctx, q) if err != nil { return err } @@ -204,7 +214,7 @@ func (t *joinTable) insertJoins(ctx context.Context, id int, foreignIDs []int) e foreignIDs = sliceutil.AppendUniques(nil, foreignIDs) for _, fk := range foreignIDs { - if _, err := tx.ExecStmt(ctx, stmt, id, fk); err != nil { + if _, err := dbWrapper.ExecStmt(ctx, stmt, id, fk); err != nil { return err } } @@ -578,30 +588,30 @@ func (t *orderedValueTable[T]) modifyJoins(ctx context.Context, id int, v []T, m return nil } -type scenesMoviesTable struct { +type scenesGroupsTable struct { table } -type moviesScenesRow struct { +type groupsScenesRow struct { SceneID null.Int `db:"scene_id"` - MovieID null.Int `db:"movie_id"` + GroupID null.Int `db:"group_id"` SceneIndex null.Int `db:"scene_index"` } -func (r moviesScenesRow) resolve(sceneID int) models.MoviesScenes { - return models.MoviesScenes{ - MovieID: int(r.MovieID.Int64), +func (r groupsScenesRow) resolve(sceneID int) models.GroupsScenes { + return models.GroupsScenes{ + GroupID: int(r.GroupID.Int64), SceneIndex: nullIntPtr(r.SceneIndex), } } -func (t *scenesMoviesTable) get(ctx context.Context, id int) ([]models.MoviesScenes, error) { - q := dialect.Select("movie_id", "scene_index").From(t.table.table).Where(t.idColumn.Eq(id)) +func (t *scenesGroupsTable) get(ctx context.Context, id int) ([]models.GroupsScenes, error) { + q := dialect.Select("group_id", "scene_index").From(t.table.table).Where(t.idColumn.Eq(id)) const single = false - var ret []models.MoviesScenes + var ret []models.GroupsScenes if err := queryFunc(ctx, q, single, func(rows *sqlx.Rows) error { - var v moviesScenesRow + var v groupsScenesRow if err := rows.StructScan(&v); err != 
nil { return err } @@ -610,15 +620,15 @@ func (t *scenesMoviesTable) get(ctx context.Context, id int) ([]models.MoviesSce return nil }); err != nil { - return nil, fmt.Errorf("getting scene movies from %s: %w", t.table.table.GetTable(), err) + return nil, fmt.Errorf("getting scene groups from %s: %w", t.table.table.GetTable(), err) } return ret, nil } -func (t *scenesMoviesTable) insertJoin(ctx context.Context, id int, v models.MoviesScenes) (sql.Result, error) { - q := dialect.Insert(t.table.table).Cols(t.idColumn.GetCol(), "movie_id", "scene_index").Vals( - goqu.Vals{id, v.MovieID, intFromPtr(v.SceneIndex)}, +func (t *scenesGroupsTable) insertJoin(ctx context.Context, id int, v models.GroupsScenes) (sql.Result, error) { + q := dialect.Insert(t.table.table).Cols(t.idColumn.GetCol(), "group_id", "scene_index").Vals( + goqu.Vals{id, v.GroupID, intFromPtr(v.SceneIndex)}, ) ret, err := exec(ctx, q) if err != nil { @@ -628,7 +638,7 @@ func (t *scenesMoviesTable) insertJoin(ctx context.Context, id int, v models.Mov return ret, nil } -func (t *scenesMoviesTable) insertJoins(ctx context.Context, id int, v []models.MoviesScenes) error { +func (t *scenesGroupsTable) insertJoins(ctx context.Context, id int, v []models.GroupsScenes) error { for _, fk := range v { if _, err := t.insertJoin(ctx, id, fk); err != nil { return err @@ -638,7 +648,7 @@ func (t *scenesMoviesTable) insertJoins(ctx context.Context, id int, v []models. 
return nil } -func (t *scenesMoviesTable) replaceJoins(ctx context.Context, id int, v []models.MoviesScenes) error { +func (t *scenesGroupsTable) replaceJoins(ctx context.Context, id int, v []models.GroupsScenes) error { if err := t.destroy(ctx, []int{id}); err != nil { return err } @@ -646,7 +656,7 @@ func (t *scenesMoviesTable) replaceJoins(ctx context.Context, id int, v []models return t.insertJoins(ctx, id, v) } -func (t *scenesMoviesTable) addJoins(ctx context.Context, id int, v []models.MoviesScenes) error { +func (t *scenesGroupsTable) addJoins(ctx context.Context, id int, v []models.GroupsScenes) error { // get existing foreign keys fks, err := t.get(ctx, id) if err != nil { @@ -654,12 +664,12 @@ func (t *scenesMoviesTable) addJoins(ctx context.Context, id int, v []models.Mov } // only add values that are not already present - var filtered []models.MoviesScenes + var filtered []models.GroupsScenes for _, vv := range v { found := false for _, e := range fks { - if vv.MovieID == e.MovieID { + if vv.GroupID == e.GroupID { found = true break } @@ -672,11 +682,11 @@ func (t *scenesMoviesTable) addJoins(ctx context.Context, id int, v []models.Mov return t.insertJoins(ctx, id, filtered) } -func (t *scenesMoviesTable) destroyJoins(ctx context.Context, id int, v []models.MoviesScenes) error { +func (t *scenesGroupsTable) destroyJoins(ctx context.Context, id int, v []models.GroupsScenes) error { for _, vv := range v { q := dialect.Delete(t.table.table).Where( t.idColumn.Eq(id), - t.table.table.Col("movie_id").Eq(vv.MovieID), + t.table.table.Col("group_id").Eq(vv.GroupID), ) if _, err := exec(ctx, q); err != nil { @@ -687,7 +697,7 @@ func (t *scenesMoviesTable) destroyJoins(ctx context.Context, id int, v []models return nil } -func (t *scenesMoviesTable) modifyJoins(ctx context.Context, id int, v []models.MoviesScenes, mode models.RelationshipUpdateMode) error { +func (t *scenesGroupsTable) modifyJoins(ctx context.Context, id int, v []models.GroupsScenes, mode 
models.RelationshipUpdateMode) error { switch mode { case models.RelationshipUpdateModeSet: return t.replaceJoins(ctx, id, v) @@ -700,6 +710,45 @@ func (t *scenesMoviesTable) modifyJoins(ctx context.Context, id int, v []models. return nil } +type imageGalleriesTable struct { + joinTable +} + +func (t *imageGalleriesTable) setCover(ctx context.Context, id int, galleryID int) error { + if err := t.resetCover(ctx, galleryID); err != nil { + return err + } + + table := t.table.table + + q := dialect.Update(table).Prepared(true).Set(goqu.Record{ + "cover": true, + }).Where(t.idColumn.Eq(id), table.Col(galleryIDColumn).Eq(galleryID)) + + if _, err := exec(ctx, q); err != nil { + return fmt.Errorf("setting cover flag in %s: %w", t.table.table.GetTable(), err) + } + + return nil +} + +func (t *imageGalleriesTable) resetCover(ctx context.Context, galleryID int) error { + table := t.table.table + + q := dialect.Update(table).Prepared(true).Set(goqu.Record{ + "cover": false, + }).Where( + table.Col(galleryIDColumn).Eq(galleryID), + table.Col("cover").Eq(true), + ) + + if _, err := exec(ctx, q); err != nil { + return fmt.Errorf("unsetting cover flags in %s: %w", t.table.table.GetTable(), err) + } + + return nil +} + type relatedFilesTable struct { table } @@ -1077,8 +1126,7 @@ func queryFunc(ctx context.Context, query *goqu.SelectDataset, single bool, f fu return err } - wrapper := dbWrapper{} - rows, err := wrapper.QueryxContext(ctx, q, args...) + rows, err := dbWrapper.QueryxContext(ctx, q, args...) if err != nil && !errors.Is(err, sql.ErrNoRows) { return fmt.Errorf("querying `%s` [%v]: %w", q, args, err) @@ -1107,8 +1155,7 @@ func querySimple(ctx context.Context, query *goqu.SelectDataset, out interface{} return err } - wrapper := dbWrapper{} - rows, err := wrapper.QueryxContext(ctx, q, args...) + rows, err := dbWrapper.QueryxContext(ctx, q, args...) 
if err != nil { return fmt.Errorf("querying `%s` [%v]: %w", q, args, err) } diff --git a/pkg/sqlite/tables.go b/pkg/sqlite/tables.go index 2eebf033f56..74a5ebe698c 100644 --- a/pkg/sqlite/tables.go +++ b/pkg/sqlite/tables.go @@ -25,15 +25,24 @@ var ( scenesTagsJoinTable = goqu.T(scenesTagsTable) scenesPerformersJoinTable = goqu.T(performersScenesTable) scenesStashIDsJoinTable = goqu.T("scene_stash_ids") - scenesMoviesJoinTable = goqu.T(moviesScenesTable) + scenesGroupsJoinTable = goqu.T(groupsScenesTable) scenesURLsJoinTable = goqu.T(scenesURLsTable) performersAliasesJoinTable = goqu.T(performersAliasesTable) + performersURLsJoinTable = goqu.T(performerURLsTable) performersTagsJoinTable = goqu.T(performersTagsTable) performersStashIDsJoinTable = goqu.T("performer_stash_ids") studiosAliasesJoinTable = goqu.T(studioAliasesTable) + studiosTagsJoinTable = goqu.T(studiosTagsTable) studiosStashIDsJoinTable = goqu.T("studio_stash_ids") + + groupsURLsJoinTable = goqu.T(groupURLsTable) + groupsTagsJoinTable = goqu.T(groupsTagsTable) + groupRelationsJoinTable = goqu.T(groupRelationsTable) + + tagsAliasesJoinTable = goqu.T(tagAliasesTable) + tagRelationsJoinTable = goqu.T(tagRelationsTable) ) var ( @@ -49,12 +58,14 @@ var ( }, } - imageGalleriesTableMgr = &joinTable{ - table: table{ - table: galleriesImagesJoinTable, - idColumn: galleriesImagesJoinTable.Col(imageIDColumn), + imageGalleriesTableMgr = &imageGalleriesTable{ + joinTable: joinTable{ + table: table{ + table: galleriesImagesJoinTable, + idColumn: galleriesImagesJoinTable.Col(imageIDColumn), + }, + fkColumn: galleriesImagesJoinTable.Col(galleryIDColumn), }, - fkColumn: galleriesImagesJoinTable.Col(galleryIDColumn), } imagesTagsTableMgr = &joinTable{ @@ -176,10 +187,10 @@ var ( }, } - scenesMoviesTableMgr = &scenesMoviesTable{ + scenesGroupsTableMgr = &scenesGroupsTable{ table: table{ - table: scenesMoviesJoinTable, - idColumn: scenesMoviesJoinTable.Col(sceneIDColumn), + table: scenesGroupsJoinTable, + idColumn: 
scenesGroupsJoinTable.Col(sceneIDColumn), }, } @@ -249,6 +260,14 @@ var ( stringColumn: performersAliasesJoinTable.Col(performerAliasColumn), } + performersURLsTableMgr = &orderedValueTable[string]{ + table: table{ + table: performersURLsJoinTable, + idColumn: performersURLsJoinTable.Col(performerIDColumn), + }, + valueColumn: performersURLsJoinTable.Col(performerURLColumn), + } + performersTagsTableMgr = &joinTable{ table: table{ table: performersTagsJoinTable, @@ -279,6 +298,14 @@ var ( stringColumn: studiosAliasesJoinTable.Col(studioAliasColumn), } + studiosTagsTableMgr = &joinTable{ + table: table{ + table: studiosTagsJoinTable, + idColumn: studiosTagsJoinTable.Col(studioIDColumn), + }, + fkColumn: studiosTagsJoinTable.Col(tagIDColumn), + } + studiosStashIDsTableMgr = &stashIDTable{ table: table{ table: studiosStashIDsJoinTable, @@ -292,12 +319,52 @@ var ( table: goqu.T(tagTable), idColumn: goqu.T(tagTable).Col(idColumn), } + + tagsAliasesTableMgr = &stringTable{ + table: table{ + table: tagsAliasesJoinTable, + idColumn: tagsAliasesJoinTable.Col(tagIDColumn), + }, + stringColumn: tagsAliasesJoinTable.Col(tagAliasColumn), + } + + tagsParentTagsTableMgr = &joinTable{ + table: table{ + table: tagRelationsJoinTable, + idColumn: tagRelationsJoinTable.Col(tagChildIDColumn), + }, + fkColumn: tagRelationsJoinTable.Col(tagParentIDColumn), + } + + tagsChildTagsTableMgr = *tagsParentTagsTableMgr.invert() ) var ( - movieTableMgr = &table{ - table: goqu.T(movieTable), - idColumn: goqu.T(movieTable).Col(idColumn), + groupTableMgr = &table{ + table: goqu.T(groupTable), + idColumn: goqu.T(groupTable).Col(idColumn), + } + + groupsURLsTableMgr = &orderedValueTable[string]{ + table: table{ + table: groupsURLsJoinTable, + idColumn: groupsURLsJoinTable.Col(groupIDColumn), + }, + valueColumn: groupsURLsJoinTable.Col(groupURLColumn), + } + + groupsTagsTableMgr = &joinTable{ + table: table{ + table: groupsTagsJoinTable, + idColumn: groupsTagsJoinTable.Col(groupIDColumn), + }, + 
fkColumn: groupsTagsJoinTable.Col(tagIDColumn), + foreignTable: tagTableMgr, + orderBy: tagTableMgr.table.Col("name").Asc(), + } + + groupRelationshipTableMgr = &table{ + table: groupRelationsJoinTable, } ) diff --git a/pkg/sqlite/tag.go b/pkg/sqlite/tag.go index cfed64bfce7..42bdd9bbe45 100644 --- a/pkg/sqlite/tag.go +++ b/pkg/sqlite/tag.go @@ -24,6 +24,10 @@ const ( tagAliasColumn = "alias" tagImageBlobColumn = "image_blob" + + tagRelationsTable = "tags_relations" + tagParentIDColumn = "parent_id" + tagChildIDColumn = "child_id" ) type tagRow struct { @@ -90,19 +94,64 @@ func (r *tagRowRecord) fromPartial(o models.TagPartial) { r.setTimestamp("updated_at", o.UpdatedAt) } -type TagStore struct { +type tagRepositoryType struct { repository - blobJoinQueryBuilder - tableMgr *table + aliases stringRepository + + scenes joinRepository + images joinRepository + galleries joinRepository } -func NewTagStore(blobStore *BlobStore) *TagStore { - return &TagStore{ +var ( + tagRepository = tagRepositoryType{ repository: repository{ tableName: tagTable, idColumn: idColumn, }, + aliases: stringRepository{ + repository: repository{ + tableName: tagAliasesTable, + idColumn: tagIDColumn, + }, + stringColumn: tagAliasColumn, + }, + scenes: joinRepository{ + repository: repository{ + tableName: scenesTagsTable, + idColumn: tagIDColumn, + }, + fkColumn: sceneIDColumn, + foreignTable: sceneTable, + }, + images: joinRepository{ + repository: repository{ + tableName: imagesTagsTable, + idColumn: tagIDColumn, + }, + fkColumn: imageIDColumn, + foreignTable: imageTable, + }, + galleries: joinRepository{ + repository: repository{ + tableName: galleriesTagsTable, + idColumn: tagIDColumn, + }, + fkColumn: galleryIDColumn, + foreignTable: galleryTable, + }, + } +) + +type TagStore struct { + blobJoinQueryBuilder + + tableMgr *table +} + +func NewTagStore(blobStore *BlobStore) *TagStore { + return &TagStore{ blobJoinQueryBuilder: blobJoinQueryBuilder{ blobStore: blobStore, joinTable: tagTable, 
@@ -128,6 +177,24 @@ func (qb *TagStore) Create(ctx context.Context, newObject *models.Tag) error { return err } + if newObject.Aliases.Loaded() { + if err := tagsAliasesTableMgr.insertJoins(ctx, id, newObject.Aliases.List()); err != nil { + return err + } + } + + if newObject.ParentIDs.Loaded() { + if err := tagsParentTagsTableMgr.insertJoins(ctx, id, newObject.ParentIDs.List()); err != nil { + return err + } + } + + if newObject.ChildIDs.Loaded() { + if err := tagsChildTagsTableMgr.insertJoins(ctx, id, newObject.ChildIDs.List()); err != nil { + return err + } + } + updated, err := qb.find(ctx, id) if err != nil { return fmt.Errorf("finding after create: %w", err) @@ -153,6 +220,24 @@ func (qb *TagStore) UpdatePartial(ctx context.Context, id int, partial models.Ta } } + if partial.Aliases != nil { + if err := tagsAliasesTableMgr.modifyJoins(ctx, id, partial.Aliases.Values, partial.Aliases.Mode); err != nil { + return nil, err + } + } + + if partial.ParentIDs != nil { + if err := tagsParentTagsTableMgr.modifyJoins(ctx, id, partial.ParentIDs.IDs, partial.ParentIDs.Mode); err != nil { + return nil, err + } + } + + if partial.ChildIDs != nil { + if err := tagsChildTagsTableMgr.modifyJoins(ctx, id, partial.ChildIDs.IDs, partial.ChildIDs.Mode); err != nil { + return nil, err + } + } + return qb.find(ctx, id) } @@ -164,6 +249,24 @@ func (qb *TagStore) Update(ctx context.Context, updatedObject *models.Tag) error return err } + if updatedObject.Aliases.Loaded() { + if err := tagsAliasesTableMgr.replaceJoins(ctx, updatedObject.ID, updatedObject.Aliases.List()); err != nil { + return err + } + } + + if updatedObject.ParentIDs.Loaded() { + if err := tagsParentTagsTableMgr.replaceJoins(ctx, updatedObject.ID, updatedObject.ParentIDs.List()); err != nil { + return err + } + } + + if updatedObject.ChildIDs.Loaded() { + if err := tagsChildTagsTableMgr.replaceJoins(ctx, updatedObject.ID, updatedObject.ChildIDs.List()); err != nil { + return err + } + } + return nil } @@ -176,7 
+279,7 @@ func (qb *TagStore) Destroy(ctx context.Context, id int) error { // cannot unset primary_tag_id in scene_markers because it is not nullable countQuery := "SELECT COUNT(*) as count FROM scene_markers where primary_tag_id = ?" args := []interface{}{id} - primaryMarkers, err := qb.runCountQuery(ctx, countQuery, args) + primaryMarkers, err := tagRepository.runCountQuery(ctx, countQuery, args) if err != nil { return err } @@ -185,7 +288,7 @@ func (qb *TagStore) Destroy(ctx context.Context, id int) error { return errors.New("cannot delete tag used as a primary tag in scene markers") } - return qb.destroyExisting(ctx, []int{id}) + return tagRepository.destroyExisting(ctx, []int{id}) } // returns nil, nil if not found @@ -321,6 +424,18 @@ func (qb *TagStore) FindByGalleryID(ctx context.Context, galleryID int) ([]*mode return qb.queryTags(ctx, query, args) } +func (qb *TagStore) FindByGroupID(ctx context.Context, groupID int) ([]*models.Tag, error) { + query := ` + SELECT tags.* FROM tags + LEFT JOIN groups_tags as groups_join on groups_join.tag_id = tags.id + WHERE groups_join.group_id = ? + GROUP BY tags.id + ` + query += qb.getDefaultTagSort() + args := []interface{}{groupID} + return qb.queryTags(ctx, query, args) +} + func (qb *TagStore) FindBySceneMarkerID(ctx context.Context, sceneMarkerID int) ([]*models.Tag, error) { query := ` SELECT tags.* FROM tags @@ -333,6 +448,18 @@ func (qb *TagStore) FindBySceneMarkerID(ctx context.Context, sceneMarkerID int) return qb.queryTags(ctx, query, args) } +func (qb *TagStore) FindByStudioID(ctx context.Context, studioID int) ([]*models.Tag, error) { + query := ` + SELECT tags.* FROM tags + LEFT JOIN studios_tags as studios_join on studios_join.tag_id = tags.id + WHERE studios_join.studio_id = ? 
+ GROUP BY tags.id + ` + query += qb.getDefaultTagSort() + args := []interface{}{studioID} + return qb.queryTags(ctx, query, args) +} + func (qb *TagStore) FindByName(ctx context.Context, name string, nocase bool) (*models.Tag, error) { // query := "SELECT * FROM tags WHERE name = ?" // if nocase { @@ -378,6 +505,14 @@ func (qb *TagStore) FindByNames(ctx context.Context, names []string, nocase bool return ret, nil } +func (qb *TagStore) GetParentIDs(ctx context.Context, relatedID int) ([]int, error) { + return tagsParentTagsTableMgr.get(ctx, relatedID) +} + +func (qb *TagStore) GetChildIDs(ctx context.Context, relatedID int) ([]int, error) { + return tagsChildTagsTableMgr.get(ctx, relatedID) +} + func (qb *TagStore) FindByParentTagID(ctx context.Context, parentID int) ([]*models.Tag, error) { query := ` SELECT tags.* FROM tags @@ -455,73 +590,6 @@ func (qb *TagStore) QueryForAutoTag(ctx context.Context, words []string) ([]*mod return qb.queryTags(ctx, query+" WHERE "+where, args) } -func (qb *TagStore) validateFilter(tagFilter *models.TagFilterType) error { - const and = "AND" - const or = "OR" - const not = "NOT" - - if tagFilter.And != nil { - if tagFilter.Or != nil { - return illegalFilterCombination(and, or) - } - if tagFilter.Not != nil { - return illegalFilterCombination(and, not) - } - - return qb.validateFilter(tagFilter.And) - } - - if tagFilter.Or != nil { - if tagFilter.Not != nil { - return illegalFilterCombination(or, not) - } - - return qb.validateFilter(tagFilter.Or) - } - - if tagFilter.Not != nil { - return qb.validateFilter(tagFilter.Not) - } - - return nil -} - -func (qb *TagStore) makeFilter(ctx context.Context, tagFilter *models.TagFilterType) *filterBuilder { - query := &filterBuilder{} - - if tagFilter.And != nil { - query.and(qb.makeFilter(ctx, tagFilter.And)) - } - if tagFilter.Or != nil { - query.or(qb.makeFilter(ctx, tagFilter.Or)) - } - if tagFilter.Not != nil { - query.not(qb.makeFilter(ctx, tagFilter.Not)) - } - - 
query.handleCriterion(ctx, stringCriterionHandler(tagFilter.Name, tagTable+".name")) - query.handleCriterion(ctx, tagAliasCriterionHandler(qb, tagFilter.Aliases)) - - query.handleCriterion(ctx, boolCriterionHandler(tagFilter.Favorite, tagTable+".favorite", nil)) - query.handleCriterion(ctx, stringCriterionHandler(tagFilter.Description, tagTable+".description")) - query.handleCriterion(ctx, boolCriterionHandler(tagFilter.IgnoreAutoTag, tagTable+".ignore_auto_tag", nil)) - - query.handleCriterion(ctx, tagIsMissingCriterionHandler(qb, tagFilter.IsMissing)) - query.handleCriterion(ctx, tagSceneCountCriterionHandler(qb, tagFilter.SceneCount)) - query.handleCriterion(ctx, tagImageCountCriterionHandler(qb, tagFilter.ImageCount)) - query.handleCriterion(ctx, tagGalleryCountCriterionHandler(qb, tagFilter.GalleryCount)) - query.handleCriterion(ctx, tagPerformerCountCriterionHandler(qb, tagFilter.PerformerCount)) - query.handleCriterion(ctx, tagMarkerCountCriterionHandler(qb, tagFilter.MarkerCount)) - query.handleCriterion(ctx, tagParentsCriterionHandler(qb, tagFilter.Parents)) - query.handleCriterion(ctx, tagChildrenCriterionHandler(qb, tagFilter.Children)) - query.handleCriterion(ctx, tagParentCountCriterionHandler(qb, tagFilter.ParentCount)) - query.handleCriterion(ctx, tagChildCountCriterionHandler(qb, tagFilter.ChildCount)) - query.handleCriterion(ctx, timestampCriterionHandler(tagFilter.CreatedAt, "tags.created_at")) - query.handleCriterion(ctx, timestampCriterionHandler(tagFilter.UpdatedAt, "tags.updated_at")) - - return query -} - func (qb *TagStore) Query(ctx context.Context, tagFilter *models.TagFilterType, findFilter *models.FindFilterType) ([]*models.Tag, int, error) { if tagFilter == nil { tagFilter = &models.TagFilterType{} @@ -530,7 +598,7 @@ func (qb *TagStore) Query(ctx context.Context, tagFilter *models.TagFilterType, findFilter = &models.FindFilterType{} } - query := qb.newQuery() + query := tagRepository.newQuery() distinctIDs(&query, tagTable) if q := 
findFilter.Q; q != nil && *q != "" { @@ -539,10 +607,9 @@ func (qb *TagStore) Query(ctx context.Context, tagFilter *models.TagFilterType, query.parseQueryString(searchColumns, *q) } - if err := qb.validateFilter(tagFilter); err != nil { - return nil, 0, err - } - filter := qb.makeFilter(ctx, tagFilter) + filter := filterBuilderFromHandler(ctx, &tagFilterHandler{ + tagFilter: tagFilter, + }) if err := query.addFilter(filter); err != nil { return nil, 0, err @@ -567,302 +634,14 @@ func (qb *TagStore) Query(ctx context.Context, tagFilter *models.TagFilterType, return tags, countResult, nil } -func tagAliasCriterionHandler(qb *TagStore, alias *models.StringCriterionInput) criterionHandlerFunc { - h := stringListCriterionHandlerBuilder{ - joinTable: tagAliasesTable, - stringColumn: tagAliasColumn, - addJoinTable: func(f *filterBuilder) { - qb.aliasRepository().join(f, "", "tags.id") - }, - } - - return h.handler(alias) -} - -func tagIsMissingCriterionHandler(qb *TagStore, isMissing *string) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if isMissing != nil && *isMissing != "" { - switch *isMissing { - case "image": - f.addWhere("tags.image_blob IS NULL") - default: - f.addWhere("(tags." + *isMissing + " IS NULL OR TRIM(tags." + *isMissing + ") = '')") - } - } - } -} - -func tagSceneCountCriterionHandler(qb *TagStore, sceneCount *models.IntCriterionInput) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if sceneCount != nil { - f.addLeftJoin("scenes_tags", "", "scenes_tags.tag_id = tags.id") - clause, args := getIntCriterionWhereClause("count(distinct scenes_tags.scene_id)", *sceneCount) - - f.addHaving(clause, args...) 
- } - } -} - -func tagImageCountCriterionHandler(qb *TagStore, imageCount *models.IntCriterionInput) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if imageCount != nil { - f.addLeftJoin("images_tags", "", "images_tags.tag_id = tags.id") - clause, args := getIntCriterionWhereClause("count(distinct images_tags.image_id)", *imageCount) - - f.addHaving(clause, args...) - } - } -} - -func tagGalleryCountCriterionHandler(qb *TagStore, galleryCount *models.IntCriterionInput) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if galleryCount != nil { - f.addLeftJoin("galleries_tags", "", "galleries_tags.tag_id = tags.id") - clause, args := getIntCriterionWhereClause("count(distinct galleries_tags.gallery_id)", *galleryCount) - - f.addHaving(clause, args...) - } - } -} - -func tagPerformerCountCriterionHandler(qb *TagStore, performerCount *models.IntCriterionInput) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if performerCount != nil { - f.addLeftJoin("performers_tags", "", "performers_tags.tag_id = tags.id") - clause, args := getIntCriterionWhereClause("count(distinct performers_tags.performer_id)", *performerCount) - - f.addHaving(clause, args...) - } - } -} - -func tagMarkerCountCriterionHandler(qb *TagStore, markerCount *models.IntCriterionInput) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if markerCount != nil { - f.addLeftJoin("scene_markers_tags", "", "scene_markers_tags.tag_id = tags.id") - f.addLeftJoin("scene_markers", "", "scene_markers_tags.scene_marker_id = scene_markers.id OR scene_markers.primary_tag_id = tags.id") - clause, args := getIntCriterionWhereClause("count(distinct scene_markers.id)", *markerCount) - - f.addHaving(clause, args...) 
- } - } -} - -func tagParentsCriterionHandler(qb *TagStore, criterion *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if criterion != nil { - tags := criterion.CombineExcludes() - - // validate the modifier - switch tags.Modifier { - case models.CriterionModifierIncludesAll, models.CriterionModifierIncludes, models.CriterionModifierExcludes, models.CriterionModifierIsNull, models.CriterionModifierNotNull: - // valid - default: - f.setError(fmt.Errorf("invalid modifier %s for tag parent/children", criterion.Modifier)) - } - - if tags.Modifier == models.CriterionModifierIsNull || tags.Modifier == models.CriterionModifierNotNull { - var notClause string - if tags.Modifier == models.CriterionModifierNotNull { - notClause = "NOT" - } - - f.addLeftJoin("tags_relations", "parent_relations", "tags.id = parent_relations.child_id") - - f.addWhere(fmt.Sprintf("parent_relations.parent_id IS %s NULL", notClause)) - return - } - - if len(tags.Value) == 0 && len(tags.Excludes) == 0 { - return - } - - if len(tags.Value) > 0 { - var args []interface{} - for _, val := range tags.Value { - args = append(args, val) - } - - depthVal := 0 - if tags.Depth != nil { - depthVal = *tags.Depth - } - - var depthCondition string - if depthVal != -1 { - depthCondition = fmt.Sprintf("WHERE depth < %d", depthVal) - } - - query := `parents AS ( - SELECT parent_id AS root_id, child_id AS item_id, 0 AS depth FROM tags_relations WHERE parent_id IN` + getInBinding(len(tags.Value)) + ` - UNION - SELECT root_id, child_id, depth + 1 FROM tags_relations INNER JOIN parents ON item_id = parent_id ` + depthCondition + ` - )` - - f.addRecursiveWith(query, args...) 
- - f.addLeftJoin("parents", "", "parents.item_id = tags.id") - - addHierarchicalConditionClauses(f, tags, "parents", "root_id") - } - - if len(tags.Excludes) > 0 { - var args []interface{} - for _, val := range tags.Excludes { - args = append(args, val) - } - - depthVal := 0 - if tags.Depth != nil { - depthVal = *tags.Depth - } - - var depthCondition string - if depthVal != -1 { - depthCondition = fmt.Sprintf("WHERE depth < %d", depthVal) - } - - query := `parents2 AS ( - SELECT parent_id AS root_id, child_id AS item_id, 0 AS depth FROM tags_relations WHERE parent_id IN` + getInBinding(len(tags.Excludes)) + ` - UNION - SELECT root_id, child_id, depth + 1 FROM tags_relations INNER JOIN parents2 ON item_id = parent_id ` + depthCondition + ` - )` - - f.addRecursiveWith(query, args...) - - f.addLeftJoin("parents2", "", "parents2.item_id = tags.id") - - addHierarchicalConditionClauses(f, models.HierarchicalMultiCriterionInput{ - Value: tags.Excludes, - Depth: tags.Depth, - Modifier: models.CriterionModifierExcludes, - }, "parents2", "root_id") - } - } - } -} - -func tagChildrenCriterionHandler(qb *TagStore, criterion *models.HierarchicalMultiCriterionInput) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if criterion != nil { - tags := criterion.CombineExcludes() - - // validate the modifier - switch tags.Modifier { - case models.CriterionModifierIncludesAll, models.CriterionModifierIncludes, models.CriterionModifierExcludes, models.CriterionModifierIsNull, models.CriterionModifierNotNull: - // valid - default: - f.setError(fmt.Errorf("invalid modifier %s for tag parent/children", criterion.Modifier)) - } - - if tags.Modifier == models.CriterionModifierIsNull || tags.Modifier == models.CriterionModifierNotNull { - var notClause string - if tags.Modifier == models.CriterionModifierNotNull { - notClause = "NOT" - } - - f.addLeftJoin("tags_relations", "child_relations", "tags.id = child_relations.parent_id") - - 
f.addWhere(fmt.Sprintf("child_relations.child_id IS %s NULL", notClause)) - return - } - - if len(tags.Value) == 0 && len(tags.Excludes) == 0 { - return - } - - if len(tags.Value) > 0 { - var args []interface{} - for _, val := range tags.Value { - args = append(args, val) - } - - depthVal := 0 - if tags.Depth != nil { - depthVal = *tags.Depth - } - - var depthCondition string - if depthVal != -1 { - depthCondition = fmt.Sprintf("WHERE depth < %d", depthVal) - } - - query := `children AS ( - SELECT child_id AS root_id, parent_id AS item_id, 0 AS depth FROM tags_relations WHERE child_id IN` + getInBinding(len(tags.Value)) + ` - UNION - SELECT root_id, parent_id, depth + 1 FROM tags_relations INNER JOIN children ON item_id = child_id ` + depthCondition + ` - )` - - f.addRecursiveWith(query, args...) - - f.addLeftJoin("children", "", "children.item_id = tags.id") - - addHierarchicalConditionClauses(f, tags, "children", "root_id") - } - - if len(tags.Excludes) > 0 { - var args []interface{} - for _, val := range tags.Excludes { - args = append(args, val) - } - - depthVal := 0 - if tags.Depth != nil { - depthVal = *tags.Depth - } - - var depthCondition string - if depthVal != -1 { - depthCondition = fmt.Sprintf("WHERE depth < %d", depthVal) - } - - query := `children2 AS ( - SELECT child_id AS root_id, parent_id AS item_id, 0 AS depth FROM tags_relations WHERE child_id IN` + getInBinding(len(tags.Excludes)) + ` - UNION - SELECT root_id, parent_id, depth + 1 FROM tags_relations INNER JOIN children2 ON item_id = child_id ` + depthCondition + ` - )` - - f.addRecursiveWith(query, args...) 
- - f.addLeftJoin("children2", "", "children2.item_id = tags.id") - - addHierarchicalConditionClauses(f, models.HierarchicalMultiCriterionInput{ - Value: tags.Excludes, - Depth: tags.Depth, - Modifier: models.CriterionModifierExcludes, - }, "children2", "root_id") - } - } - } -} - -func tagParentCountCriterionHandler(qb *TagStore, parentCount *models.IntCriterionInput) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if parentCount != nil { - f.addLeftJoin("tags_relations", "parents_count", "parents_count.child_id = tags.id") - clause, args := getIntCriterionWhereClause("count(distinct parents_count.parent_id)", *parentCount) - - f.addHaving(clause, args...) - } - } -} - -func tagChildCountCriterionHandler(qb *TagStore, childCount *models.IntCriterionInput) criterionHandlerFunc { - return func(ctx context.Context, f *filterBuilder) { - if childCount != nil { - f.addLeftJoin("tags_relations", "children_count", "children_count.parent_id = tags.id") - clause, args := getIntCriterionWhereClause("count(distinct children_count.child_id)", *childCount) - - f.addHaving(clause, args...) 
- } - } -} - var tagSortOptions = sortOptions{ "created_at", "galleries_count", + "groups_count", "id", "images_count", + "movies_count", + "studios_count", "name", "performers_count", "random", @@ -903,6 +682,10 @@ func (qb *TagStore) getTagSort(query *queryBuilder, findFilter *models.FindFilte sortQuery += getCountSort(tagTable, galleriesTagsTable, tagIDColumn, direction) case "performers_count": sortQuery += getCountSort(tagTable, performersTagsTable, tagIDColumn, direction) + case "studios_count": + sortQuery += getCountSort(tagTable, studiosTagsTable, tagIDColumn, direction) + case "movies_count", "groups_count": + sortQuery += getCountSort(tagTable, groupsTagsTable, tagIDColumn, direction) default: sortQuery += getSort(sort, direction, "tags") } @@ -915,7 +698,7 @@ func (qb *TagStore) getTagSort(query *queryBuilder, findFilter *models.FindFilte func (qb *TagStore) queryTags(ctx context.Context, query string, args []interface{}) ([]*models.Tag, error) { const single = false var ret []*models.Tag - if err := qb.queryFunc(ctx, query, args, single, func(r *sqlx.Rows) error { + if err := tagRepository.queryFunc(ctx, query, args, single, func(r *sqlx.Rows) error { var f tagRow if err := r.StructScan(&f); err != nil { return err @@ -935,7 +718,7 @@ func (qb *TagStore) queryTags(ctx context.Context, query string, args []interfac func (qb *TagStore) queryTagPaths(ctx context.Context, query string, args []interface{}) ([]*models.TagPath, error) { const single = false var ret []*models.TagPath - if err := qb.queryFunc(ctx, query, args, single, func(r *sqlx.Rows) error { + if err := tagRepository.queryFunc(ctx, query, args, single, func(r *sqlx.Rows) error { var f tagPathRow if err := r.StructScan(&f); err != nil { return err @@ -968,23 +751,12 @@ func (qb *TagStore) destroyImage(ctx context.Context, tagID int) error { return qb.blobJoinQueryBuilder.DestroyImage(ctx, tagID, tagImageBlobColumn) } -func (qb *TagStore) aliasRepository() *stringRepository { - return 
&stringRepository{ - repository: repository{ - tx: qb.tx, - tableName: tagAliasesTable, - idColumn: tagIDColumn, - }, - stringColumn: tagAliasColumn, - } -} - func (qb *TagStore) GetAliases(ctx context.Context, tagID int) ([]string, error) { - return qb.aliasRepository().get(ctx, tagID) + return tagRepository.aliases.get(ctx, tagID) } func (qb *TagStore) UpdateAliases(ctx context.Context, tagID int, aliases []string) error { - return qb.aliasRepository().replace(ctx, tagID, aliases) + return tagRepository.aliases.replace(ctx, tagID, aliases) } func (qb *TagStore) Merge(ctx context.Context, source []int, destination int) error { @@ -1011,11 +783,12 @@ func (qb *TagStore) Merge(ctx context.Context, source []int, destination int) er galleriesTagsTable: galleryIDColumn, imagesTagsTable: imageIDColumn, "performers_tags": "performer_id", + "studios_tags": "studio_id", } args = append(args, destination) for table, idColumn := range tagTables { - _, err := qb.tx.Exec(ctx, `UPDATE OR IGNORE `+table+` + _, err := dbWrapper.Exec(ctx, `UPDATE OR IGNORE `+table+` SET tag_id = ? WHERE tag_id IN `+inBinding+` AND NOT EXISTS(SELECT 1 FROM `+table+` o WHERE o.`+idColumn+` = `+table+`.`+idColumn+` AND o.tag_id = ?)`, @@ -1026,22 +799,22 @@ AND NOT EXISTS(SELECT 1 FROM `+table+` o WHERE o.`+idColumn+` = `+table+`.`+idCo } // delete source tag ids from the table where they couldn't be set - if _, err := qb.tx.Exec(ctx, `DELETE FROM `+table+` WHERE tag_id IN `+inBinding, srcArgs...); err != nil { + if _, err := dbWrapper.Exec(ctx, `DELETE FROM `+table+` WHERE tag_id IN `+inBinding, srcArgs...); err != nil { return err } } - _, err := qb.tx.Exec(ctx, "UPDATE "+sceneMarkerTable+" SET primary_tag_id = ? WHERE primary_tag_id IN "+inBinding, args...) + _, err := dbWrapper.Exec(ctx, "UPDATE "+sceneMarkerTable+" SET primary_tag_id = ? WHERE primary_tag_id IN "+inBinding, args...) 
if err != nil { return err } - _, err = qb.tx.Exec(ctx, "INSERT INTO "+tagAliasesTable+" (tag_id, alias) SELECT ?, name FROM "+tagTable+" WHERE id IN "+inBinding, args...) + _, err = dbWrapper.Exec(ctx, "INSERT INTO "+tagAliasesTable+" (tag_id, alias) SELECT ?, name FROM "+tagTable+" WHERE id IN "+inBinding, args...) if err != nil { return err } - _, err = qb.tx.Exec(ctx, "UPDATE "+tagAliasesTable+" SET tag_id = ? WHERE tag_id IN "+inBinding, args...) + _, err = dbWrapper.Exec(ctx, "UPDATE "+tagAliasesTable+" SET tag_id = ? WHERE tag_id IN "+inBinding, args...) if err != nil { return err } @@ -1057,8 +830,7 @@ AND NOT EXISTS(SELECT 1 FROM `+table+` o WHERE o.`+idColumn+` = `+table+`.`+idCo } func (qb *TagStore) UpdateParentTags(ctx context.Context, tagID int, parentIDs []int) error { - tx := qb.tx - if _, err := tx.Exec(ctx, "DELETE FROM tags_relations WHERE child_id = ?", tagID); err != nil { + if _, err := dbWrapper.Exec(ctx, "DELETE FROM tags_relations WHERE child_id = ?", tagID); err != nil { return err } @@ -1071,7 +843,7 @@ func (qb *TagStore) UpdateParentTags(ctx context.Context, tagID int, parentIDs [ } query := "INSERT INTO tags_relations (parent_id, child_id) VALUES " + strings.Join(values, ", ") - if _, err := tx.Exec(ctx, query, args...); err != nil { + if _, err := dbWrapper.Exec(ctx, query, args...); err != nil { return err } } @@ -1080,8 +852,7 @@ func (qb *TagStore) UpdateParentTags(ctx context.Context, tagID int, parentIDs [ } func (qb *TagStore) UpdateChildTags(ctx context.Context, tagID int, childIDs []int) error { - tx := qb.tx - if _, err := tx.Exec(ctx, "DELETE FROM tags_relations WHERE parent_id = ?", tagID); err != nil { + if _, err := dbWrapper.Exec(ctx, "DELETE FROM tags_relations WHERE parent_id = ?", tagID); err != nil { return err } @@ -1094,7 +865,7 @@ func (qb *TagStore) UpdateChildTags(ctx context.Context, tagID int, childIDs []i } query := "INSERT INTO tags_relations (parent_id, child_id) VALUES " + strings.Join(values, ", ") - if 
_, err := tx.Exec(ctx, query, args...); err != nil { + if _, err := dbWrapper.Exec(ctx, query, args...); err != nil { return err } } @@ -1149,3 +920,17 @@ SELECT t.*, c.path FROM tags t INNER JOIN children c ON t.id = c.child_id return qb.queryTagPaths(ctx, query, args) } + +type tagRelationshipStore struct { + idRelationshipStore +} + +func (s *tagRelationshipStore) CountByTagID(ctx context.Context, tagID int) (int, error) { + joinTable := s.joinTable.table.table + q := dialect.Select(goqu.COUNT("*")).From(joinTable).Where(joinTable.Col(tagIDColumn).Eq(tagID)) + return count(ctx, q) +} + +func (s *tagRelationshipStore) GetTagIDs(ctx context.Context, id int) ([]int, error) { + return s.joinTable.get(ctx, id) +} diff --git a/pkg/sqlite/tag_filter.go b/pkg/sqlite/tag_filter.go new file mode 100644 index 00000000000..ba9e9bb08ec --- /dev/null +++ b/pkg/sqlite/tag_filter.go @@ -0,0 +1,221 @@ +package sqlite + +import ( + "context" + + "github.com/stashapp/stash/pkg/models" +) + +type tagFilterHandler struct { + tagFilter *models.TagFilterType +} + +func (qb *tagFilterHandler) validate() error { + tagFilter := qb.tagFilter + if tagFilter == nil { + return nil + } + + if err := validateFilterCombination(tagFilter.OperatorFilter); err != nil { + return err + } + + if subFilter := tagFilter.SubFilter(); subFilter != nil { + sqb := &tagFilterHandler{tagFilter: subFilter} + if err := sqb.validate(); err != nil { + return err + } + } + + return nil +} + +func (qb *tagFilterHandler) handle(ctx context.Context, f *filterBuilder) { + tagFilter := qb.tagFilter + if tagFilter == nil { + return + } + + if err := qb.validate(); err != nil { + f.setError(err) + return + } + + sf := tagFilter.SubFilter() + if sf != nil { + sub := &tagFilterHandler{sf} + handleSubFilter(ctx, sub, f, tagFilter.OperatorFilter) + } + + f.handleCriterion(ctx, qb.criterionHandler()) +} + +var tagHierarchyHandler = hierarchicalRelationshipHandler{ + primaryTable: tagTable, + relationTable: tagRelationsTable, 
+ aliasPrefix: tagTable, + parentIDCol: "parent_id", + childIDCol: "child_id", +} + +func (qb *tagFilterHandler) criterionHandler() criterionHandler { + tagFilter := qb.tagFilter + return compoundHandler{ + stringCriterionHandler(tagFilter.Name, tagTable+".name"), + qb.aliasCriterionHandler(tagFilter.Aliases), + + boolCriterionHandler(tagFilter.Favorite, tagTable+".favorite", nil), + stringCriterionHandler(tagFilter.Description, tagTable+".description"), + boolCriterionHandler(tagFilter.IgnoreAutoTag, tagTable+".ignore_auto_tag", nil), + + qb.isMissingCriterionHandler(tagFilter.IsMissing), + qb.sceneCountCriterionHandler(tagFilter.SceneCount), + qb.imageCountCriterionHandler(tagFilter.ImageCount), + qb.galleryCountCriterionHandler(tagFilter.GalleryCount), + qb.performerCountCriterionHandler(tagFilter.PerformerCount), + qb.studioCountCriterionHandler(tagFilter.StudioCount), + + qb.groupCountCriterionHandler(tagFilter.GroupCount), + qb.groupCountCriterionHandler(tagFilter.MovieCount), + + qb.markerCountCriterionHandler(tagFilter.MarkerCount), + tagHierarchyHandler.ParentsCriterionHandler(tagFilter.Parents), + tagHierarchyHandler.ChildrenCriterionHandler(tagFilter.Children), + tagHierarchyHandler.ParentCountCriterionHandler(tagFilter.ParentCount), + tagHierarchyHandler.ChildCountCriterionHandler(tagFilter.ChildCount), + ×tampCriterionHandler{tagFilter.CreatedAt, "tags.created_at", nil}, + ×tampCriterionHandler{tagFilter.UpdatedAt, "tags.updated_at", nil}, + + &relatedFilterHandler{ + relatedIDCol: "scenes_tags.scene_id", + relatedRepo: sceneRepository.repository, + relatedHandler: &sceneFilterHandler{tagFilter.ScenesFilter}, + joinFn: func(f *filterBuilder) { + tagRepository.scenes.innerJoin(f, "", "tags.id") + }, + }, + + &relatedFilterHandler{ + relatedIDCol: "images_tags.image_id", + relatedRepo: imageRepository.repository, + relatedHandler: &imageFilterHandler{tagFilter.ImagesFilter}, + joinFn: func(f *filterBuilder) { + tagRepository.images.innerJoin(f, "", 
"tags.id") + }, + }, + + &relatedFilterHandler{ + relatedIDCol: "galleries_tags.gallery_id", + relatedRepo: galleryRepository.repository, + relatedHandler: &galleryFilterHandler{tagFilter.GalleriesFilter}, + joinFn: func(f *filterBuilder) { + tagRepository.galleries.innerJoin(f, "", "tags.id") + }, + }, + } +} + +func (qb *tagFilterHandler) aliasCriterionHandler(alias *models.StringCriterionInput) criterionHandlerFunc { + h := stringListCriterionHandlerBuilder{ + primaryTable: tagTable, + primaryFK: tagIDColumn, + joinTable: tagAliasesTable, + stringColumn: tagAliasColumn, + addJoinTable: func(f *filterBuilder) { + tagRepository.aliases.join(f, "", "tags.id") + }, + } + + return h.handler(alias) +} + +func (qb *tagFilterHandler) isMissingCriterionHandler(isMissing *string) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if isMissing != nil && *isMissing != "" { + switch *isMissing { + case "image": + f.addWhere("tags.image_blob IS NULL") + default: + f.addWhere("(tags." + *isMissing + " IS NULL OR TRIM(tags." + *isMissing + ") = '')") + } + } + } +} + +func (qb *tagFilterHandler) sceneCountCriterionHandler(sceneCount *models.IntCriterionInput) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if sceneCount != nil { + f.addLeftJoin("scenes_tags", "", "scenes_tags.tag_id = tags.id") + clause, args := getIntCriterionWhereClause("count(distinct scenes_tags.scene_id)", *sceneCount) + + f.addHaving(clause, args...) + } + } +} + +func (qb *tagFilterHandler) imageCountCriterionHandler(imageCount *models.IntCriterionInput) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if imageCount != nil { + f.addLeftJoin("images_tags", "", "images_tags.tag_id = tags.id") + clause, args := getIntCriterionWhereClause("count(distinct images_tags.image_id)", *imageCount) + + f.addHaving(clause, args...) 
+ } + } +} + +func (qb *tagFilterHandler) galleryCountCriterionHandler(galleryCount *models.IntCriterionInput) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if galleryCount != nil { + f.addLeftJoin("galleries_tags", "", "galleries_tags.tag_id = tags.id") + clause, args := getIntCriterionWhereClause("count(distinct galleries_tags.gallery_id)", *galleryCount) + + f.addHaving(clause, args...) + } + } +} + +func (qb *tagFilterHandler) performerCountCriterionHandler(performerCount *models.IntCriterionInput) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if performerCount != nil { + f.addLeftJoin("performers_tags", "", "performers_tags.tag_id = tags.id") + clause, args := getIntCriterionWhereClause("count(distinct performers_tags.performer_id)", *performerCount) + + f.addHaving(clause, args...) + } + } +} + +func (qb *tagFilterHandler) studioCountCriterionHandler(studioCount *models.IntCriterionInput) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if studioCount != nil { + f.addLeftJoin("studios_tags", "", "studios_tags.tag_id = tags.id") + clause, args := getIntCriterionWhereClause("count(distinct studios_tags.studio_id)", *studioCount) + + f.addHaving(clause, args...) + } + } +} + +func (qb *tagFilterHandler) groupCountCriterionHandler(groupCount *models.IntCriterionInput) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if groupCount != nil { + f.addLeftJoin("groups_tags", "", "groups_tags.tag_id = tags.id") + clause, args := getIntCriterionWhereClause("count(distinct groups_tags.group_id)", *groupCount) + + f.addHaving(clause, args...) 
+ } + } +} + +func (qb *tagFilterHandler) markerCountCriterionHandler(markerCount *models.IntCriterionInput) criterionHandlerFunc { + return func(ctx context.Context, f *filterBuilder) { + if markerCount != nil { + f.addLeftJoin("scene_markers_tags", "", "scene_markers_tags.tag_id = tags.id") + f.addLeftJoin("scene_markers", "", "scene_markers_tags.scene_marker_id = scene_markers.id OR scene_markers.primary_tag_id = tags.id") + clause, args := getIntCriterionWhereClause("count(distinct scene_markers.id)", *markerCount) + + f.addHaving(clause, args...) + } + } +} diff --git a/pkg/sqlite/tag_test.go b/pkg/sqlite/tag_test.go index a44232720b7..5359be78517 100644 --- a/pkg/sqlite/tag_test.go +++ b/pkg/sqlite/tag_test.go @@ -42,6 +42,33 @@ func TestMarkerFindBySceneMarkerID(t *testing.T) { }) } +func TestTagFindByGroupID(t *testing.T) { + withTxn(func(ctx context.Context) error { + tqb := db.Tag + + groupID := groupIDs[groupIdxWithTag] + + tags, err := tqb.FindByGroupID(ctx, groupID) + + if err != nil { + t.Errorf("Error finding tags: %s", err.Error()) + } + + assert.Len(t, tags, 1) + assert.Equal(t, tagIDs[tagIdxWithGroup], tags[0].ID) + + tags, err = tqb.FindByGroupID(ctx, 0) + + if err != nil { + t.Errorf("Error finding tags: %s", err.Error()) + } + + assert.Len(t, tags, 0) + + return nil + }) +} + func TestTagFindByName(t *testing.T) { withTxn(func(ctx context.Context) error { tqb := db.Tag @@ -203,6 +230,14 @@ func TestTagQuerySort(t *testing.T) { tags = queryTags(ctx, t, sqb, nil, findFilter) assert.Equal(tagIDs[tagIdx2WithPerformer], tags[0].ID) + sortBy = "studios_count" + tags = queryTags(ctx, t, sqb, nil, findFilter) + assert.Equal(tagIDs[tagIdx2WithStudio], tags[0].ID) + + sortBy = "movies_count" + tags = queryTags(ctx, t, sqb, nil, findFilter) + assert.Equal(tagIDs[tagIdx1WithGroup], tags[0].ID) + return nil }) } @@ -538,6 +573,45 @@ func verifyTagPerformerCount(t *testing.T, imageCountCriterion models.IntCriteri }) } +func TestTagQueryStudioCount(t 
*testing.T) { + countCriterion := models.IntCriterionInput{ + Value: 1, + Modifier: models.CriterionModifierEquals, + } + + verifyTagStudioCount(t, countCriterion) + + countCriterion.Modifier = models.CriterionModifierNotEquals + verifyTagStudioCount(t, countCriterion) + + countCriterion.Modifier = models.CriterionModifierLessThan + verifyTagStudioCount(t, countCriterion) + + countCriterion.Value = 0 + countCriterion.Modifier = models.CriterionModifierGreaterThan + verifyTagStudioCount(t, countCriterion) +} + +func verifyTagStudioCount(t *testing.T, imageCountCriterion models.IntCriterionInput) { + withTxn(func(ctx context.Context) error { + qb := db.Tag + tagFilter := models.TagFilterType{ + StudioCount: &imageCountCriterion, + } + + tags, _, err := qb.Query(ctx, &tagFilter, nil) + if err != nil { + t.Errorf("Error querying tag: %s", err.Error()) + } + + for _, tag := range tags { + verifyInt(t, getTagStudioCount(tag.ID), imageCountCriterion) + } + + return nil + }) +} + func TestTagQueryParentCount(t *testing.T) { countCriterion := models.IntCriterionInput{ Value: 1, @@ -638,7 +712,7 @@ func TestTagQueryParent(t *testing.T) { assert.Len(t, tags, 1) // ensure id is correct - assert.Equal(t, sceneIDs[tagIdxWithParentTag], tags[0].ID) + assert.Equal(t, tagIDs[tagIdxWithParentTag], tags[0].ID) tagCriterion.Modifier = models.CriterionModifierExcludes @@ -851,6 +925,9 @@ func TestTagMerge(t *testing.T) { tagIdxWithPerformer, tagIdx1WithPerformer, tagIdx2WithPerformer, + tagIdxWithStudio, + tagIdx1WithStudio, + tagIdx2WithStudio, tagIdxWithGallery, tagIdx1WithGallery, tagIdx2WithGallery, @@ -939,6 +1016,14 @@ func TestTagMerge(t *testing.T) { assert.Contains(performerTagIDs, destID) + // ensure studio points to new tag + studioTagIDs, err := db.Studio.GetTagIDs(ctx, studioIDs[studioIdxWithTwoTags]) + if err != nil { + return err + } + + assert.Contains(studioTagIDs, destID) + return nil }); err != nil { t.Error(err.Error()) diff --git a/pkg/sqlite/transaction.go 
b/pkg/sqlite/transaction.go index eda5b6b8d2d..705c61e0789 100644 --- a/pkg/sqlite/transaction.go +++ b/pkg/sqlite/transaction.go @@ -132,7 +132,7 @@ func (db *Database) Repository() models.Repository { Gallery: db.Gallery, GalleryChapter: db.GalleryChapter, Image: db.Image, - Movie: db.Movie, + Group: db.Group, Performer: db.Performer, Scene: db.Scene, SceneMarker: db.SceneMarker, diff --git a/pkg/sqlite/tx.go b/pkg/sqlite/tx.go index 64df163a0b6..a2e272aa9f3 100644 --- a/pkg/sqlite/tx.go +++ b/pkg/sqlite/tx.go @@ -35,7 +35,9 @@ func logSQL(start time.Time, query string, args ...interface{}) { } } -type dbWrapper struct{} +type dbWrapperType struct{} + +var dbWrapper = dbWrapperType{} func sqlError(err error, sql string, args ...interface{}) error { if err == nil { @@ -45,7 +47,7 @@ func sqlError(err error, sql string, args ...interface{}) error { return fmt.Errorf("error executing `%s` [%v]: %w", sql, args, err) } -func (*dbWrapper) Get(ctx context.Context, dest interface{}, query string, args ...interface{}) error { +func (*dbWrapperType) Get(ctx context.Context, dest interface{}, query string, args ...interface{}) error { tx, err := getDBReader(ctx) if err != nil { return sqlError(err, query, args...) @@ -58,7 +60,7 @@ func (*dbWrapper) Get(ctx context.Context, dest interface{}, query string, args return sqlError(err, query, args...) } -func (*dbWrapper) Select(ctx context.Context, dest interface{}, query string, args ...interface{}) error { +func (*dbWrapperType) Select(ctx context.Context, dest interface{}, query string, args ...interface{}) error { tx, err := getDBReader(ctx) if err != nil { return sqlError(err, query, args...) @@ -71,7 +73,7 @@ func (*dbWrapper) Select(ctx context.Context, dest interface{}, query string, ar return sqlError(err, query, args...) 
} -func (*dbWrapper) Queryx(ctx context.Context, query string, args ...interface{}) (*sqlx.Rows, error) { +func (*dbWrapperType) Queryx(ctx context.Context, query string, args ...interface{}) (*sqlx.Rows, error) { tx, err := getDBReader(ctx) if err != nil { return nil, sqlError(err, query, args...) @@ -84,7 +86,7 @@ func (*dbWrapper) Queryx(ctx context.Context, query string, args ...interface{}) return ret, sqlError(err, query, args...) } -func (*dbWrapper) QueryxContext(ctx context.Context, query string, args ...interface{}) (*sqlx.Rows, error) { +func (*dbWrapperType) QueryxContext(ctx context.Context, query string, args ...interface{}) (*sqlx.Rows, error) { tx, err := getDBReader(ctx) if err != nil { return nil, sqlError(err, query, args...) @@ -97,7 +99,7 @@ func (*dbWrapper) QueryxContext(ctx context.Context, query string, args ...inter return ret, sqlError(err, query, args...) } -func (*dbWrapper) NamedExec(ctx context.Context, query string, arg interface{}) (sql.Result, error) { +func (*dbWrapperType) NamedExec(ctx context.Context, query string, arg interface{}) (sql.Result, error) { tx, err := getTx(ctx) if err != nil { return nil, sqlError(err, query, arg) @@ -110,7 +112,7 @@ func (*dbWrapper) NamedExec(ctx context.Context, query string, arg interface{}) return ret, sqlError(err, query, arg) } -func (*dbWrapper) Exec(ctx context.Context, query string, args ...interface{}) (sql.Result, error) { +func (*dbWrapperType) Exec(ctx context.Context, query string, args ...interface{}) (sql.Result, error) { tx, err := getTx(ctx) if err != nil { return nil, sqlError(err, query, args...) @@ -124,7 +126,7 @@ func (*dbWrapper) Exec(ctx context.Context, query string, args ...interface{}) ( } // Prepare creates a prepared statement. 
-func (*dbWrapper) Prepare(ctx context.Context, query string, args ...interface{}) (*stmt, error) { +func (*dbWrapperType) Prepare(ctx context.Context, query string, args ...interface{}) (*stmt, error) { tx, err := getTx(ctx) if err != nil { return nil, sqlError(err, query, args...) @@ -142,7 +144,7 @@ func (*dbWrapper) Prepare(ctx context.Context, query string, args ...interface{} }, nil } -func (*dbWrapper) ExecStmt(ctx context.Context, stmt *stmt, args ...interface{}) (sql.Result, error) { +func (*dbWrapperType) ExecStmt(ctx context.Context, stmt *stmt, args ...interface{}) (sql.Result, error) { _, err := getTx(ctx) if err != nil { return nil, sqlError(err, stmt.query, args...) diff --git a/pkg/studio/doc.go b/pkg/studio/doc.go new file mode 100644 index 00000000000..72c429c5771 --- /dev/null +++ b/pkg/studio/doc.go @@ -0,0 +1,2 @@ +// Package studio provides the application logic for studio functionality. +package studio diff --git a/pkg/studio/export_test.go b/pkg/studio/export_test.go index da6da8ad4f8..0e42141ec37 100644 --- a/pkg/studio/export_test.go +++ b/pkg/studio/export_test.go @@ -68,6 +68,7 @@ func createFullStudio(id int, parentID int) models.Studio { Rating: &rating, IgnoreAutoTag: autoTagIgnored, Aliases: models.NewRelatedStrings(aliases), + TagIDs: models.NewRelatedIDs([]int{}), StashIDs: models.NewRelatedStashIDs(stashIDs), } @@ -84,6 +85,7 @@ func createEmptyStudio(id int) models.Studio { CreatedAt: createTime, UpdatedAt: updateTime, Aliases: models.NewRelatedStrings([]string{}), + TagIDs: models.NewRelatedIDs([]int{}), StashIDs: models.NewRelatedStashIDs([]models.StashID{}), } } diff --git a/pkg/studio/import.go b/pkg/studio/import.go index bfee4133fb3..d880650787d 100644 --- a/pkg/studio/import.go +++ b/pkg/studio/import.go @@ -4,9 +4,11 @@ import ( "context" "errors" "fmt" + "strings" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/jsonschema" + "github.com/stashapp/stash/pkg/sliceutil" 
"github.com/stashapp/stash/pkg/utils" ) @@ -19,6 +21,7 @@ var ErrParentStudioNotExist = errors.New("parent studio does not exist") type Importer struct { ReaderWriter ImporterReaderWriter + TagWriter models.TagFinderCreator Input jsonschema.Studio MissingRefBehaviour models.ImportMissingRefEnum @@ -34,6 +37,10 @@ func (i *Importer) PreImport(ctx context.Context) error { return err } + if err := i.populateTags(ctx); err != nil { + return err + } + var err error if len(i.Input.Image) > 0 { i.imageData, err = utils.ProcessBase64Image(i.Input.Image) @@ -45,6 +52,74 @@ func (i *Importer) PreImport(ctx context.Context) error { return nil } +func (i *Importer) populateTags(ctx context.Context) error { + if len(i.Input.Tags) > 0 { + + tags, err := importTags(ctx, i.TagWriter, i.Input.Tags, i.MissingRefBehaviour) + if err != nil { + return err + } + + for _, p := range tags { + i.studio.TagIDs.Add(p.ID) + } + } + + return nil +} + +func importTags(ctx context.Context, tagWriter models.TagFinderCreator, names []string, missingRefBehaviour models.ImportMissingRefEnum) ([]*models.Tag, error) { + tags, err := tagWriter.FindByNames(ctx, names, false) + if err != nil { + return nil, err + } + + var pluckedNames []string + for _, tag := range tags { + pluckedNames = append(pluckedNames, tag.Name) + } + + missingTags := sliceutil.Filter(names, func(name string) bool { + return !sliceutil.Contains(pluckedNames, name) + }) + + if len(missingTags) > 0 { + if missingRefBehaviour == models.ImportMissingRefEnumFail { + return nil, fmt.Errorf("tags [%s] not found", strings.Join(missingTags, ", ")) + } + + if missingRefBehaviour == models.ImportMissingRefEnumCreate { + createdTags, err := createTags(ctx, tagWriter, missingTags) + if err != nil { + return nil, fmt.Errorf("error creating tags: %v", err) + } + + tags = append(tags, createdTags...) 
+ } + + // ignore if MissingRefBehaviour set to Ignore + } + + return tags, nil +} + +func createTags(ctx context.Context, tagWriter models.TagFinderCreator, names []string) ([]*models.Tag, error) { + var ret []*models.Tag + for _, name := range names { + newTag := models.NewTag() + newTag.Name = name + + err := tagWriter.Create(ctx, &newTag) + if err != nil { + return nil, err + } + + ret = append(ret, &newTag) + } + + return ret, nil +} + func (i *Importer) populateParentStudio(ctx context.Context) error { if i.Input.ParentStudio != "" { studio, err := i.ReaderWriter.FindByName(ctx, i.Input.ParentStudio, false) @@ -149,6 +224,7 @@ func studioJSONtoStudio(studioJSON jsonschema.Studio) models.Studio { CreatedAt: studioJSON.CreatedAt.GetTime(), UpdatedAt: studioJSON.UpdatedAt.GetTime(), + TagIDs: models.NewRelatedIDs([]int{}), StashIDs: models.NewRelatedStashIDs(studioJSON.StashIDs), } diff --git a/pkg/studio/import_test.go b/pkg/studio/import_test.go index e89256371cf..882b8ca5682 100644 --- a/pkg/studio/import_test.go +++ b/pkg/studio/import_test.go @@ -16,13 +16,19 @@ const invalidImage = "aW1hZ2VCeXRlcw&&" const ( studioNameErr = "studioNameErr" - existingStudioName = "existingTagName" + existingStudioName = "existingStudioName" existingStudioID = 100 + existingTagID = 105 + errTagsID = 106 existingParentStudioName = "existingParentStudioName" existingParentStudioErr = "existingParentStudioErr" missingParentStudioName = "existingParentStudioName" + + existingTagName = "existingTagName" + existingTagErr = "existingTagErr" + missingTagName = "missingTagName" ) var testCtx = context.Background() @@ -67,6 +73,97 @@ func TestImporterPreImport(t *testing.T) { assert.Equal(t, expectedStudio, i.studio) } +func TestImporterPreImportWithTag(t *testing.T) { + db := mocks.NewDatabase() + + i := Importer{ + ReaderWriter: db.Studio, + TagWriter: db.Tag, + MissingRefBehaviour: models.ImportMissingRefEnumFail, + Input: jsonschema.Studio{ + Tags: []string{ + existingTagName, + 
}, + }, + } + + db.Tag.On("FindByNames", testCtx, []string{existingTagName}, false).Return([]*models.Tag{ + { + ID: existingTagID, + Name: existingTagName, + }, + }, nil).Once() + db.Tag.On("FindByNames", testCtx, []string{existingTagErr}, false).Return(nil, errors.New("FindByNames error")).Once() + + err := i.PreImport(testCtx) + assert.Nil(t, err) + assert.Equal(t, existingTagID, i.studio.TagIDs.List()[0]) + + i.Input.Tags = []string{existingTagErr} + err = i.PreImport(testCtx) + assert.NotNil(t, err) + + db.AssertExpectations(t) +} + +func TestImporterPreImportWithMissingTag(t *testing.T) { + db := mocks.NewDatabase() + + i := Importer{ + ReaderWriter: db.Studio, + TagWriter: db.Tag, + Input: jsonschema.Studio{ + Tags: []string{ + missingTagName, + }, + }, + MissingRefBehaviour: models.ImportMissingRefEnumFail, + } + + db.Tag.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Times(3) + db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.Tag")).Run(func(args mock.Arguments) { + t := args.Get(1).(*models.Tag) + t.ID = existingTagID + }).Return(nil) + + err := i.PreImport(testCtx) + assert.NotNil(t, err) + + i.MissingRefBehaviour = models.ImportMissingRefEnumIgnore + err = i.PreImport(testCtx) + assert.Nil(t, err) + + i.MissingRefBehaviour = models.ImportMissingRefEnumCreate + err = i.PreImport(testCtx) + assert.Nil(t, err) + assert.Equal(t, existingTagID, i.studio.TagIDs.List()[0]) + + db.AssertExpectations(t) +} + +func TestImporterPreImportWithMissingTagCreateErr(t *testing.T) { + db := mocks.NewDatabase() + + i := Importer{ + ReaderWriter: db.Studio, + TagWriter: db.Tag, + Input: jsonschema.Studio{ + Tags: []string{ + missingTagName, + }, + }, + MissingRefBehaviour: models.ImportMissingRefEnumCreate, + } + + db.Tag.On("FindByNames", testCtx, []string{missingTagName}, false).Return(nil, nil).Once() + db.Tag.On("Create", testCtx, mock.AnythingOfType("*models.Tag")).Return(errors.New("Create error")) + + err := i.PreImport(testCtx) 
+ assert.NotNil(t, err) + + db.AssertExpectations(t) +} + func TestImporterPreImportWithParent(t *testing.T) { db := mocks.NewDatabase() @@ -156,6 +253,7 @@ func TestImporterPostImport(t *testing.T) { i := Importer{ ReaderWriter: db.Studio, + TagWriter: db.Tag, Input: jsonschema.Studio{ Aliases: []string{"alias"}, }, @@ -181,6 +279,7 @@ func TestImporterFindExistingID(t *testing.T) { i := Importer{ ReaderWriter: db.Studio, + TagWriter: db.Tag, Input: jsonschema.Studio{ Name: studioName, }, @@ -223,6 +322,7 @@ func TestCreate(t *testing.T) { i := Importer{ ReaderWriter: db.Studio, + TagWriter: db.Tag, studio: studio, } @@ -258,6 +358,7 @@ func TestUpdate(t *testing.T) { i := Importer{ ReaderWriter: db.Studio, + TagWriter: db.Tag, studio: studio, } diff --git a/pkg/studio/query.go b/pkg/studio/query.go index b20cec33109..97e8e2c1bbe 100644 --- a/pkg/studio/query.go +++ b/pkg/studio/query.go @@ -2,6 +2,7 @@ package studio import ( "context" + "strconv" "github.com/stashapp/stash/pkg/models" ) @@ -53,3 +54,15 @@ func ByAlias(ctx context.Context, qb models.StudioQueryer, alias string) (*model return nil, nil } + +func CountByTagID(ctx context.Context, qb models.StudioQueryer, id int, depth *int) (int, error) { + filter := &models.StudioFilterType{ + Tags: &models.HierarchicalMultiCriterionInput{ + Value: []string{strconv.Itoa(id)}, + Modifier: models.CriterionModifierIncludes, + Depth: depth, + }, + } + + return qb.QueryCount(ctx, filter, nil) +} diff --git a/pkg/tag/doc.go b/pkg/tag/doc.go new file mode 100644 index 00000000000..604ab88c147 --- /dev/null +++ b/pkg/tag/doc.go @@ -0,0 +1,2 @@ +// Package tag provides application logic for tag objects. 
+package tag diff --git a/pkg/tag/update.go b/pkg/tag/update.go index dcb78bf9cab..99e9b916569 100644 --- a/pkg/tag/update.go +++ b/pkg/tag/update.go @@ -33,6 +33,10 @@ type InvalidTagHierarchyError struct { } func (e *InvalidTagHierarchyError) Error() string { + if e.ApplyingTag == "" { + return fmt.Sprintf("cannot apply tag \"%s\" as a %s of tag as it is already %s", e.InvalidTag, e.Direction, e.CurrentRelation) + } + return fmt.Sprintf("cannot apply tag \"%s\" as a %s of \"%s\" as it is already %s (%s)", e.InvalidTag, e.Direction, e.ApplyingTag, e.CurrentRelation, e.TagPath) } @@ -80,16 +84,83 @@ func EnsureAliasesUnique(ctx context.Context, id int, aliases []string, qb model type RelationshipFinder interface { FindAllAncestors(ctx context.Context, tagID int, excludeIDs []int) ([]*models.TagPath, error) FindAllDescendants(ctx context.Context, tagID int, excludeIDs []int) ([]*models.TagPath, error) - FindByChildTagID(ctx context.Context, childID int) ([]*models.Tag, error) - FindByParentTagID(ctx context.Context, parentID int) ([]*models.Tag, error) + models.TagRelationLoader +} + +func ValidateHierarchyNew(ctx context.Context, parentIDs, childIDs []int, qb RelationshipFinder) error { + allAncestors := make(map[int]*models.TagPath) + allDescendants := make(map[int]*models.TagPath) + + for _, parentID := range parentIDs { + parentsAncestors, err := qb.FindAllAncestors(ctx, parentID, nil) + if err != nil { + return err + } + + for _, ancestorTag := range parentsAncestors { + allAncestors[ancestorTag.ID] = ancestorTag + } + } + + for _, childID := range childIDs { + childsDescendants, err := qb.FindAllDescendants(ctx, childID, nil) + if err != nil { + return err + } + + for _, descendentTag := range childsDescendants { + allDescendants[descendentTag.ID] = descendentTag + } + } + + // Validate that the tag is not a parent of any of its ancestors + validateParent := func(testID int) error { + if parentTag, exists := allDescendants[testID]; exists { + return 
&InvalidTagHierarchyError{ + Direction: "parent", + CurrentRelation: "a descendant", + InvalidTag: parentTag.Name, + TagPath: parentTag.Path, + } + } + + return nil + } + + // Validate that the tag is not a child of any of its ancestors + validateChild := func(testID int) error { + if childTag, exists := allAncestors[testID]; exists { + return &InvalidTagHierarchyError{ + Direction: "child", + CurrentRelation: "an ancestor", + InvalidTag: childTag.Name, + TagPath: childTag.Path, + } + } + + return nil + } + + for _, parentID := range parentIDs { + if err := validateParent(parentID); err != nil { + return err + } + } + + for _, childID := range childIDs { + if err := validateChild(childID); err != nil { + return err + } + } + + return nil } -func ValidateHierarchy(ctx context.Context, tag *models.Tag, parentIDs, childIDs []int, qb RelationshipFinder) error { - id := tag.ID +func ValidateHierarchyExisting(ctx context.Context, tag *models.Tag, parentIDs, childIDs []int, qb RelationshipFinder) error { allAncestors := make(map[int]*models.TagPath) allDescendants := make(map[int]*models.TagPath) - parentsAncestors, err := qb.FindAllAncestors(ctx, id, nil) + parentsAncestors, err := qb.FindAllAncestors(ctx, tag.ID, nil) if err != nil { return err } @@ -98,7 +169,7 @@ func ValidateHierarchy(ctx context.Context, tag *models.Tag, parentIDs, childIDs allAncestors[ancestorTag.ID] = ancestorTag } - childsDescendants, err := qb.FindAllDescendants(ctx, id, nil) + childsDescendants, err := qb.FindAllDescendants(ctx, tag.ID, nil) if err != nil { return err } @@ -135,28 +206,6 @@ func ValidateHierarchy(ctx context.Context, tag *models.Tag, parentIDs, childIDs return nil } - if parentIDs == nil { - parentTags, err := qb.FindByChildTagID(ctx, id) - if err != nil { - return err - } - - for _, parentTag := range parentTags { - parentIDs = append(parentIDs, parentTag.ID) - } - } - - if childIDs == nil { - childTags, err := qb.FindByParentTagID(ctx, id) - if err != nil { - return err - } 
- - for _, childTag := range childTags { - childIDs = append(childIDs, childTag.ID) - } - } - for _, parentID := range parentIDs { if err := validateParent(parentID); err != nil { return err @@ -176,38 +225,38 @@ func MergeHierarchy(ctx context.Context, destination int, sources []int, qb Rela var mergedParents, mergedChildren []int allIds := append([]int{destination}, sources...) - addTo := func(mergedItems []int, tags []*models.Tag) []int { + addTo := func(mergedItems []int, tagIDs []int) []int { Tags: - for _, tag := range tags { + for _, tagID := range tagIDs { // Ignore tags which are already set for _, existingItem := range mergedItems { - if tag.ID == existingItem { + if tagID == existingItem { continue Tags } } // Ignore tags which are being merged, as these are rolled up anyway (if A is merged into B any direct link between them can be ignored) for _, id := range allIds { - if tag.ID == id { + if tagID == id { continue Tags } } - mergedItems = append(mergedItems, tag.ID) + mergedItems = append(mergedItems, tagID) } return mergedItems } for _, id := range allIds { - parents, err := qb.FindByChildTagID(ctx, id) + parents, err := qb.GetParentIDs(ctx, id) if err != nil { return nil, nil, err } mergedParents = addTo(mergedParents, parents) - children, err := qb.FindByParentTagID(ctx, id) + children, err := qb.GetChildIDs(ctx, id) if err != nil { return nil, nil, err } diff --git a/pkg/tag/update_test.go b/pkg/tag/update_test.go index c581d34ac43..462c981434f 100644 --- a/pkg/tag/update_test.go +++ b/pkg/tag/update_test.go @@ -211,14 +211,11 @@ var testUniqueHierarchyCases = []testUniqueHierarchyCase{ func TestEnsureHierarchy(t *testing.T) { for _, tc := range testUniqueHierarchyCases { - testEnsureHierarchy(t, tc, false, false) - testEnsureHierarchy(t, tc, true, false) - testEnsureHierarchy(t, tc, false, true) - testEnsureHierarchy(t, tc, true, true) + testEnsureHierarchy(t, tc) } } -func testEnsureHierarchy(t *testing.T, tc testUniqueHierarchyCase, 
queryParents, queryChildren bool) { +func testEnsureHierarchy(t *testing.T, tc testUniqueHierarchyCase) { db := mocks.NewDatabase() var parentIDs, childIDs []int @@ -244,16 +241,6 @@ func testEnsureHierarchy(t *testing.T, tc testUniqueHierarchyCase, queryParents, } } - if queryParents { - parentIDs = nil - db.Tag.On("FindByChildTagID", testCtx, tc.id).Return(tc.parents, nil).Once() - } - - if queryChildren { - childIDs = nil - db.Tag.On("FindByParentTagID", testCtx, tc.id).Return(tc.children, nil).Once() - } - db.Tag.On("FindAllAncestors", testCtx, mock.AnythingOfType("int"), []int(nil)).Return(func(ctx context.Context, tagID int, excludeIDs []int) []*models.TagPath { return tc.onFindAllAncestors }, func(ctx context.Context, tagID int, excludeIDs []int) error { @@ -272,7 +259,7 @@ func testEnsureHierarchy(t *testing.T, tc testUniqueHierarchyCase, queryParents, return fmt.Errorf("undefined descendants for: %d", tagID) }).Maybe() - res := ValidateHierarchy(testCtx, testUniqueHierarchyTags[tc.id], parentIDs, childIDs, db.Tag) + res := ValidateHierarchyExisting(testCtx, testUniqueHierarchyTags[tc.id], parentIDs, childIDs, db.Tag) assert := assert.New(t) diff --git a/pkg/tag/validate.go b/pkg/tag/validate.go new file mode 100644 index 00000000000..966cec9451b --- /dev/null +++ b/pkg/tag/validate.go @@ -0,0 +1,102 @@ +package tag + +import ( + "context" + "errors" + "fmt" + + "github.com/stashapp/stash/pkg/models" +) + +var ( + ErrNameMissing = errors.New("tag name must not be blank") +) + +type NotFoundError struct { + id int +} + +func (e *NotFoundError) Error() string { + return fmt.Sprintf("tag with id %d not found", e.id) +} + +func ValidateCreate(ctx context.Context, tag models.Tag, qb models.TagReader) error { + if tag.Name == "" { + return ErrNameMissing + } + + if err := EnsureTagNameUnique(ctx, 0, tag.Name, qb); err != nil { + return err + } + + if tag.Aliases.Loaded() { + if err := EnsureAliasesUnique(ctx, tag.ID, tag.Aliases.List(), qb); err != nil { + return 
err + } + } + + if len(tag.ParentIDs.List()) > 0 || len(tag.ChildIDs.List()) > 0 { + if err := ValidateHierarchyNew(ctx, tag.ParentIDs.List(), tag.ChildIDs.List(), qb); err != nil { + return err + } + } + + return nil +} + +func ValidateUpdate(ctx context.Context, id int, partial models.TagPartial, qb models.TagReader) error { + existing, err := qb.Find(ctx, id) + if err != nil { + return err + } + + if existing == nil { + return &NotFoundError{id} + } + + if partial.Name.Set { + if partial.Name.Value == "" { + return ErrNameMissing + } + + if err := EnsureTagNameUnique(ctx, id, partial.Name.Value, qb); err != nil { + return err + } + } + + if partial.Aliases != nil { + if err := existing.LoadAliases(ctx, qb); err != nil { + return err + } + + if err := EnsureAliasesUnique(ctx, id, partial.Aliases.Apply(existing.Aliases.List()), qb); err != nil { + return err + } + } + + if partial.ParentIDs != nil || partial.ChildIDs != nil { + if err := existing.LoadParentIDs(ctx, qb); err != nil { + return err + } + + if err := existing.LoadChildIDs(ctx, qb); err != nil { + return err + } + + parentIDs := partial.ParentIDs + if parentIDs == nil { + parentIDs = &models.UpdateIDs{IDs: existing.ParentIDs.List(), Mode: models.RelationshipUpdateModeSet} + } + + childIDs := partial.ChildIDs + if childIDs == nil { + childIDs = &models.UpdateIDs{IDs: existing.ChildIDs.List(), Mode: models.RelationshipUpdateModeSet} + } + + if err := ValidateHierarchyExisting(ctx, existing, parentIDs.Apply(existing.ParentIDs.List()), childIDs.Apply(existing.ChildIDs.List()), qb); err != nil { + return err + } + } + + return nil +} diff --git a/pkg/txn/transaction.go b/pkg/txn/transaction.go index 751588eff0b..b8d0aa8300c 100644 --- a/pkg/txn/transaction.go +++ b/pkg/txn/transaction.go @@ -1,3 +1,4 @@ +// Package txn provides functions for running transactions. 
package txn import ( diff --git a/pkg/utils/doc.go b/pkg/utils/doc.go new file mode 100644 index 00000000000..2ea42ced311 --- /dev/null +++ b/pkg/utils/doc.go @@ -0,0 +1,2 @@ +// Package utils provides various utility functions for the application. +package utils diff --git a/pkg/utils/url.go b/pkg/utils/url.go new file mode 100644 index 00000000000..e4d2df23735 --- /dev/null +++ b/pkg/utils/url.go @@ -0,0 +1,15 @@ +package utils + +import "regexp" + +// URLFromHandle adds the site URL to the input if the input is not already a URL +// siteURL must not end with a slash +func URLFromHandle(input string, siteURL string) string { + // if the input is already a URL, return it + re := regexp.MustCompile(`^https?://`) + if re.MatchString(input) { + return input + } + + return siteURL + "/" + input +} diff --git a/pkg/utils/url_test.go b/pkg/utils/url_test.go new file mode 100644 index 00000000000..3076314a7fe --- /dev/null +++ b/pkg/utils/url_test.go @@ -0,0 +1,47 @@ +package utils + +import "testing" + +func TestURLFromHandle(t *testing.T) { + type args struct { + input string + siteURL string + } + tests := []struct { + name string + args args + want string + }{ + { + name: "input is already a URL https", + args: args{ + input: "https://foo.com", + siteURL: "https://bar.com", + }, + want: "https://foo.com", + }, + { + name: "input is already a URL http", + args: args{ + input: "http://foo.com", + siteURL: "https://bar.com", + }, + want: "http://foo.com", + }, + { + name: "input is not a URL", + args: args{ + input: "foo", + siteURL: "https://foo.com", + }, + want: "https://foo.com/foo", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := URLFromHandle(tt.args.input, tt.args.siteURL); got != tt.want { + t.Errorf("URLFromHandle() = %v, want %v", got, tt.want) + } + }) + } +} diff --git a/ui/v2.5/graphql/client-schema.graphql b/ui/v2.5/graphql/client-schema.graphql index 92f53cc3a33..57eff3343a8 100644 --- 
a/ui/v2.5/graphql/client-schema.graphql +++ b/ui/v2.5/graphql/client-schema.graphql @@ -16,11 +16,6 @@ extend input SaveFilterInput { ui_options: SavedUIOptions } -extend input SetDefaultFilterInput { - object_filter: SavedObjectFilter - ui_options: SavedUIOptions -} - extend type Mutation { configureUI(input: Map, partial: Map): UIConfig! } diff --git a/ui/v2.5/graphql/data/gallery-slim.graphql b/ui/v2.5/graphql/data/gallery-slim.graphql index fd2688777e7..633071e3f42 100644 --- a/ui/v2.5/graphql/data/gallery-slim.graphql +++ b/ui/v2.5/graphql/data/gallery-slim.graphql @@ -15,15 +15,6 @@ fragment SlimGalleryData on Gallery { ...FolderData } image_count - cover { - id - files { - ...ImageFileData - } - paths { - thumbnail - } - } chapters { id title @@ -48,4 +39,8 @@ fragment SlimGalleryData on Gallery { scenes { ...SlimSceneData } + paths { + cover + preview + } } diff --git a/ui/v2.5/graphql/data/gallery.graphql b/ui/v2.5/graphql/data/gallery.graphql index 5a5db3c1ace..c41f3e2b259 100644 --- a/ui/v2.5/graphql/data/gallery.graphql +++ b/ui/v2.5/graphql/data/gallery.graphql @@ -11,6 +11,11 @@ fragment GalleryData on Gallery { rating100 organized + paths { + cover + preview + } + files { ...GalleryFileData } @@ -21,9 +26,6 @@ fragment GalleryData on Gallery { chapters { ...GalleryChapterData } - cover { - ...SlimImageData - } studio { ...SlimStudioData } @@ -52,6 +54,9 @@ fragment SelectGalleryData on Gallery { thumbnail } } + paths { + preview + } files { path } diff --git a/ui/v2.5/graphql/data/movie-slim.graphql b/ui/v2.5/graphql/data/group-slim.graphql similarity index 63% rename from ui/v2.5/graphql/data/movie-slim.graphql rename to ui/v2.5/graphql/data/group-slim.graphql index 2db5b80bd45..ddb18d4e276 100644 --- a/ui/v2.5/graphql/data/movie-slim.graphql +++ b/ui/v2.5/graphql/data/group-slim.graphql @@ -1,11 +1,11 @@ -fragment SlimMovieData on Movie { +fragment SlimGroupData on Group { id name front_image_path rating100 } -fragment SelectMovieData on Movie { 
+fragment SelectGroupData on Group { id name aliases diff --git a/ui/v2.5/graphql/data/group.graphql b/ui/v2.5/graphql/data/group.graphql new file mode 100644 index 00000000000..963e8d6e672 --- /dev/null +++ b/ui/v2.5/graphql/data/group.graphql @@ -0,0 +1,38 @@ +fragment GroupData on Group { + id + name + aliases + duration + date + rating100 + director + + studio { + ...SlimStudioData + } + + tags { + ...SlimTagData + } + + containing_groups { + group { + ...SlimGroupData + } + description + } + + synopsis + urls + front_image_path + back_image_path + scene_count + scene_count_all: scene_count(depth: -1) + sub_group_count + sub_group_count_all: sub_group_count(depth: -1) + + scenes { + id + title + } +} diff --git a/ui/v2.5/graphql/data/movie.graphql b/ui/v2.5/graphql/data/movie.graphql deleted file mode 100644 index 3fd4273d28f..00000000000 --- a/ui/v2.5/graphql/data/movie.graphql +++ /dev/null @@ -1,24 +0,0 @@ -fragment MovieData on Movie { - id - name - aliases - duration - date - rating100 - director - - studio { - ...SlimStudioData - } - - synopsis - url - front_image_path - back_image_path - scene_count - - scenes { - id - title - } -} diff --git a/ui/v2.5/graphql/data/performer-slim.graphql b/ui/v2.5/graphql/data/performer-slim.graphql index 0018c97009d..1a4b9833bc8 100644 --- a/ui/v2.5/graphql/data/performer-slim.graphql +++ b/ui/v2.5/graphql/data/performer-slim.graphql @@ -3,9 +3,7 @@ fragment SlimPerformerData on Performer { name disambiguation gender - url - twitter - instagram + urls image_path favorite ignore_auto_tag @@ -41,4 +39,6 @@ fragment SelectPerformerData on Performer { disambiguation alias_list image_path + birthdate + death_date } diff --git a/ui/v2.5/graphql/data/performer.graphql b/ui/v2.5/graphql/data/performer.graphql index cd43ca4a58a..144382a4522 100644 --- a/ui/v2.5/graphql/data/performer.graphql +++ b/ui/v2.5/graphql/data/performer.graphql @@ -2,10 +2,8 @@ fragment PerformerData on Performer { id name disambiguation - url + urls 
gender - twitter - instagram birthdate ethnicity country @@ -25,7 +23,7 @@ fragment PerformerData on Performer { scene_count image_count gallery_count - movie_count + group_count performer_count o_counter diff --git a/ui/v2.5/graphql/data/scene-slim.graphql b/ui/v2.5/graphql/data/scene-slim.graphql index c24eb9752b7..7e2a4ffad2d 100644 --- a/ui/v2.5/graphql/data/scene-slim.graphql +++ b/ui/v2.5/graphql/data/scene-slim.graphql @@ -58,8 +58,8 @@ fragment SlimSceneData on Scene { image_path } - movies { - movie { + groups { + group { id name front_image_path diff --git a/ui/v2.5/graphql/data/scene.graphql b/ui/v2.5/graphql/data/scene.graphql index 2b9ef76a3ea..ef58922295a 100644 --- a/ui/v2.5/graphql/data/scene.graphql +++ b/ui/v2.5/graphql/data/scene.graphql @@ -53,9 +53,9 @@ fragment SceneData on Scene { ...SlimStudioData } - movies { - movie { - ...MovieData + groups { + group { + ...GroupData } scene_index } diff --git a/ui/v2.5/graphql/data/scrapers.graphql b/ui/v2.5/graphql/data/scrapers.graphql index 94b6434b164..7e12610a040 100644 --- a/ui/v2.5/graphql/data/scrapers.graphql +++ b/ui/v2.5/graphql/data/scrapers.graphql @@ -18,9 +18,7 @@ fragment ScrapedPerformerData on ScrapedPerformer { name disambiguation gender - url - twitter - instagram + urls birthdate ethnicity country @@ -50,9 +48,7 @@ fragment ScrapedScenePerformerData on ScrapedPerformer { name disambiguation gender - url - twitter - instagram + urls birthdate ethnicity country @@ -77,30 +73,33 @@ fragment ScrapedScenePerformerData on ScrapedPerformer { weight } -fragment ScrapedMovieStudioData on ScrapedStudio { +fragment ScrapedGroupStudioData on ScrapedStudio { stored_id name url } -fragment ScrapedMovieData on ScrapedMovie { +fragment ScrapedGroupData on ScrapedGroup { name aliases duration date rating director - url + urls synopsis front_image back_image studio { - ...ScrapedMovieStudioData + ...ScrapedGroupStudioData + } + tags { + ...ScrapedSceneTagData } } -fragment ScrapedSceneMovieData on 
ScrapedMovie { +fragment ScrapedSceneGroupData on ScrapedGroup { stored_id name aliases @@ -108,13 +107,16 @@ fragment ScrapedSceneMovieData on ScrapedMovie { date rating director - url + urls synopsis front_image back_image studio { - ...ScrapedMovieStudioData + ...ScrapedGroupStudioData + } + tags { + ...ScrapedSceneTagData } } @@ -171,8 +173,8 @@ fragment ScrapedSceneData on ScrapedScene { ...ScrapedScenePerformerData } - movies { - ...ScrapedSceneMovieData + groups { + ...ScrapedSceneGroupData } fingerprints { @@ -243,8 +245,8 @@ fragment ScrapedStashBoxSceneData on ScrapedScene { ...ScrapedScenePerformerData } - movies { - ...ScrapedSceneMovieData + groups { + ...ScrapedSceneGroupData } } diff --git a/ui/v2.5/graphql/data/studio-slim.graphql b/ui/v2.5/graphql/data/studio-slim.graphql index c3751319427..406a2ffa70a 100644 --- a/ui/v2.5/graphql/data/studio-slim.graphql +++ b/ui/v2.5/graphql/data/studio-slim.graphql @@ -12,4 +12,8 @@ fragment SlimStudioData on Studio { details rating100 aliases + tags { + id + name + } } diff --git a/ui/v2.5/graphql/data/studio.graphql b/ui/v2.5/graphql/data/studio.graphql index 576faea230d..feb35136fed 100644 --- a/ui/v2.5/graphql/data/studio.graphql +++ b/ui/v2.5/graphql/data/studio.graphql @@ -23,8 +23,8 @@ fragment StudioData on Studio { gallery_count_all: gallery_count(depth: -1) performer_count performer_count_all: performer_count(depth: -1) - movie_count - movie_count_all: movie_count(depth: -1) + group_count + group_count_all: group_count(depth: -1) stash_ids { stash_id endpoint @@ -33,6 +33,9 @@ fragment StudioData on Studio { rating100 favorite aliases + tags { + ...SlimTagData + } } fragment SelectStudioData on Studio { diff --git a/ui/v2.5/graphql/data/tag.graphql b/ui/v2.5/graphql/data/tag.graphql index b71f487abaa..b0501de697c 100644 --- a/ui/v2.5/graphql/data/tag.graphql +++ b/ui/v2.5/graphql/data/tag.graphql @@ -16,6 +16,10 @@ fragment TagData on Tag { gallery_count_all: gallery_count(depth: -1) performer_count 
performer_count_all: performer_count(depth: -1) + studio_count + studio_count_all: studio_count(depth: -1) + group_count + group_count_all: group_count(depth: -1) parents { ...SlimTagData diff --git a/ui/v2.5/graphql/mutations/filter.graphql b/ui/v2.5/graphql/mutations/filter.graphql index 5d801312379..68a6403a104 100644 --- a/ui/v2.5/graphql/mutations/filter.graphql +++ b/ui/v2.5/graphql/mutations/filter.graphql @@ -7,7 +7,3 @@ mutation SaveFilter($input: SaveFilterInput!) { mutation DestroySavedFilter($input: DestroyFilterInput!) { destroySavedFilter(input: $input) } - -mutation SetDefaultFilter($input: SetDefaultFilterInput!) { - setDefaultFilter(input: $input) -} diff --git a/ui/v2.5/graphql/mutations/gallery.graphql b/ui/v2.5/graphql/mutations/gallery.graphql index 9f9fd1e0b48..d76f98a4f95 100644 --- a/ui/v2.5/graphql/mutations/gallery.graphql +++ b/ui/v2.5/graphql/mutations/gallery.graphql @@ -43,3 +43,13 @@ mutation AddGalleryImages($gallery_id: ID!, $image_ids: [ID!]!) { mutation RemoveGalleryImages($gallery_id: ID!, $image_ids: [ID!]!) { removeGalleryImages(input: { gallery_id: $gallery_id, image_ids: $image_ids }) } + +mutation SetGalleryCover($gallery_id: ID!, $cover_image_id: ID!) { + setGalleryCover( + input: { gallery_id: $gallery_id, cover_image_id: $cover_image_id } + ) +} + +mutation ResetGalleryCover($gallery_id: ID!) { + resetGalleryCover(input: { gallery_id: $gallery_id }) +} diff --git a/ui/v2.5/graphql/mutations/group.graphql b/ui/v2.5/graphql/mutations/group.graphql new file mode 100644 index 00000000000..8065e4adbb7 --- /dev/null +++ b/ui/v2.5/graphql/mutations/group.graphql @@ -0,0 +1,37 @@ +mutation GroupCreate($input: GroupCreateInput!) { + groupCreate(input: $input) { + ...GroupData + } +} + +mutation GroupUpdate($input: GroupUpdateInput!) { + groupUpdate(input: $input) { + ...GroupData + } +} + +mutation BulkGroupUpdate($input: BulkGroupUpdateInput!) 
{ + bulkGroupUpdate(input: $input) { + ...GroupData + } +} + +mutation GroupDestroy($id: ID!) { + groupDestroy(input: { id: $id }) +} + +mutation GroupsDestroy($ids: [ID!]!) { + groupsDestroy(ids: $ids) +} + +mutation AddGroupSubGroups($input: GroupSubGroupAddInput!) { + addGroupSubGroups(input: $input) +} + +mutation RemoveGroupSubGroups($input: GroupSubGroupRemoveInput!) { + removeGroupSubGroups(input: $input) +} + +mutation ReorderSubGroups($input: ReorderSubGroupsInput!) { + reorderSubGroups(input: $input) +} diff --git a/ui/v2.5/graphql/mutations/movie.graphql b/ui/v2.5/graphql/mutations/movie.graphql deleted file mode 100644 index 1eebae15c77..00000000000 --- a/ui/v2.5/graphql/mutations/movie.graphql +++ /dev/null @@ -1,25 +0,0 @@ -mutation MovieCreate($input: MovieCreateInput!) { - movieCreate(input: $input) { - ...MovieData - } -} - -mutation MovieUpdate($input: MovieUpdateInput!) { - movieUpdate(input: $input) { - ...MovieData - } -} - -mutation BulkMovieUpdate($input: BulkMovieUpdateInput!) { - bulkMovieUpdate(input: $input) { - ...MovieData - } -} - -mutation MovieDestroy($id: ID!) { - movieDestroy(input: { id: $id }) -} - -mutation MoviesDestroy($ids: [ID!]!) { - moviesDestroy(ids: $ids) -} diff --git a/ui/v2.5/graphql/mutations/scene.graphql b/ui/v2.5/graphql/mutations/scene.graphql index 874db5c7316..c04857b7fc6 100644 --- a/ui/v2.5/graphql/mutations/scene.graphql +++ b/ui/v2.5/graphql/mutations/scene.graphql @@ -34,6 +34,18 @@ mutation SceneSaveActivity( ) } +mutation SceneResetActivity( + $id: ID! + $reset_resume: Boolean! + $reset_duration: Boolean! 
+) { + sceneResetActivity( + id: $id + reset_resume: $reset_resume + reset_duration: $reset_duration + ) +} + mutation SceneAddPlay($id: ID!, $times: [Timestamp!]) { sceneAddPlay(id: $id, times: $times) { count diff --git a/ui/v2.5/graphql/mutations/tag.graphql b/ui/v2.5/graphql/mutations/tag.graphql index 20e3b4b81a5..f2138e05702 100644 --- a/ui/v2.5/graphql/mutations/tag.graphql +++ b/ui/v2.5/graphql/mutations/tag.graphql @@ -18,6 +18,12 @@ mutation TagUpdate($input: TagUpdateInput!) { } } +mutation BulkTagUpdate($input: BulkTagUpdateInput!) { + bulkTagUpdate(input: $input) { + ...TagData + } +} + mutation TagsMerge($source: [ID!]!, $destination: ID!) { tagsMerge(input: { source: $source, destination: $destination }) { ...TagData diff --git a/ui/v2.5/graphql/queries/filter.graphql b/ui/v2.5/graphql/queries/filter.graphql index 67fbaf6cf12..276c22d757a 100644 --- a/ui/v2.5/graphql/queries/filter.graphql +++ b/ui/v2.5/graphql/queries/filter.graphql @@ -9,9 +9,3 @@ query FindSavedFilters($mode: FilterMode) { ...SavedFilterData } } - -query FindDefaultFilter($mode: FilterMode!) { - findDefaultFilter(mode: $mode) { - ...SavedFilterData - } -} diff --git a/ui/v2.5/graphql/queries/gallery.graphql b/ui/v2.5/graphql/queries/gallery.graphql index 6c33b9910d9..5c9f786e77f 100644 --- a/ui/v2.5/graphql/queries/gallery.graphql +++ b/ui/v2.5/graphql/queries/gallery.graphql @@ -28,3 +28,11 @@ query FindGalleriesForSelect( } } } + +query FindGalleryImageID($id: ID!, $index: Int!) 
{ + findGallery(id: $id) { + image(index: $index) { + id + } + } +} diff --git a/ui/v2.5/graphql/queries/misc.graphql b/ui/v2.5/graphql/queries/misc.graphql index 9367f0cc254..91aa5f15d13 100644 --- a/ui/v2.5/graphql/queries/misc.graphql +++ b/ui/v2.5/graphql/queries/misc.graphql @@ -16,7 +16,7 @@ query Stats { gallery_count performer_count studio_count - movie_count + group_count tag_count total_o_count total_play_duration diff --git a/ui/v2.5/graphql/queries/movie.graphql b/ui/v2.5/graphql/queries/movie.graphql index 088629b87b6..ad47e908dbd 100644 --- a/ui/v2.5/graphql/queries/movie.graphql +++ b/ui/v2.5/graphql/queries/movie.graphql @@ -1,27 +1,27 @@ -query FindMovies($filter: FindFilterType, $movie_filter: MovieFilterType) { - findMovies(filter: $filter, movie_filter: $movie_filter) { +query FindGroups($filter: FindFilterType, $group_filter: GroupFilterType) { + findGroups(filter: $filter, group_filter: $group_filter) { count - movies { - ...MovieData + groups { + ...GroupData } } } -query FindMovie($id: ID!) { - findMovie(id: $id) { - ...MovieData +query FindGroup($id: ID!) { + findGroup(id: $id) { + ...GroupData } } -query FindMoviesForSelect( +query FindGroupsForSelect( $filter: FindFilterType - $movie_filter: MovieFilterType + $group_filter: GroupFilterType $ids: [ID!] 
) { - findMovies(filter: $filter, movie_filter: $movie_filter, ids: $ids) { + findGroups(filter: $filter, group_filter: $group_filter, ids: $ids) { count - movies { - ...SelectMovieData + groups { + ...SelectGroupData } } } diff --git a/ui/v2.5/graphql/queries/scrapers/scrapers.graphql b/ui/v2.5/graphql/queries/scrapers/scrapers.graphql index 366938fd4d5..37e5a3a4ab8 100644 --- a/ui/v2.5/graphql/queries/scrapers/scrapers.graphql +++ b/ui/v2.5/graphql/queries/scrapers/scrapers.graphql @@ -31,11 +31,11 @@ query ListGalleryScrapers { } } -query ListMovieScrapers { - listScrapers(types: [MOVIE]) { +query ListGroupScrapers { + listScrapers(types: [GROUP]) { id name - movie { + group { urls supported_scrapes } @@ -114,9 +114,9 @@ query ScrapeGalleryURL($url: String!) { } } -query ScrapeMovieURL($url: String!) { - scrapeMovieURL(url: $url) { - ...ScrapedMovieData +query ScrapeGroupURL($url: String!) { + scrapeGroupURL(url: $url) { + ...ScrapedGroupData } } diff --git a/ui/v2.5/package.json b/ui/v2.5/package.json index 1a35cd87035..ca68160622b 100644 --- a/ui/v2.5/package.json +++ b/ui/v2.5/package.json @@ -12,6 +12,9 @@ "lint:css": "stylelint --cache \"src/**/*.scss\"", "lint:js": "eslint --cache src/", "check": "tsc --noEmit", + "eslint": "eslint", + "prettier": "prettier", + "stylelint": "stylelint", "format": "prettier --write . ../../graphql", "format-check": "prettier --check . 
../../graphql", "gqlgen": "gql-gen --config codegen.ts", diff --git a/ui/v2.5/src/App.tsx b/ui/v2.5/src/App.tsx index d670786afdb..f3229d36f59 100644 --- a/ui/v2.5/src/App.tsx +++ b/ui/v2.5/src/App.tsx @@ -50,6 +50,7 @@ import { PluginRoutes } from "./plugins"; // import plugin_api to run code import "./pluginApi"; import { ConnectionMonitor } from "./ConnectionMonitor"; +import { PatchFunction } from "./patch"; const Performers = lazyComponent( () => import("./components/Performers/Performers") @@ -65,7 +66,7 @@ const Galleries = lazyComponent( () => import("./components/Galleries/Galleries") ); -const Movies = lazyComponent(() => import("./components/Movies/Movies")); +const Groups = lazyComponent(() => import("./components/Groups/Groups")); const Tags = lazyComponent(() => import("./components/Tags/Tags")); const Images = lazyComponent(() => import("./components/Images/Images")); const Setup = lazyComponent(() => import("./components/Setup/Setup")); @@ -144,6 +145,13 @@ function sortPlugins(plugins: PluginList) { return sorted; } +const AppContainer: React.FC> = PatchFunction( + "App", + (props: React.PropsWithChildren<{}>) => { + return <>{props.children}; + } +) as React.FC; + export const App: React.FC = () => { const config = useConfiguration(); const [saveUI] = useConfigureUI(); @@ -304,7 +312,7 @@ export const App: React.FC = () => { - + { const titleProps = makeTitleProps(); return ( - - {messages ? ( - - + + {messages ? ( + - {maybeRenderReleaseNotes()} - - - }> - - - - - {maybeRenderNavbar()} -
- {renderContent()} -
-
-
-
-
-
-
-
- ) : null} -
+ + {maybeRenderReleaseNotes()} + + + }> + + + + + {maybeRenderNavbar()} +
+ {renderContent()} +
+
+
+
+
+
+
+ + ) : null} + + ); }; diff --git a/ui/v2.5/src/components/Dialogs/SubmitDraft.tsx b/ui/v2.5/src/components/Dialogs/SubmitDraft.tsx index 03d41063138..2521ed02c62 100644 --- a/ui/v2.5/src/components/Dialogs/SubmitDraft.tsx +++ b/ui/v2.5/src/components/Dialogs/SubmitDraft.tsx @@ -52,17 +52,18 @@ export const SubmitStashBoxDraft: React.FC = ({ }, [show, type, boxes, entity]); async function doSubmit() { + if (!selectedBox) return; + + const input = { + id: entity.id, + stash_box_endpoint: selectedBox.endpoint, + }; + if (type === "scene") { - const r = await mutateSubmitStashBoxSceneDraft({ - id: entity.id, - stash_box_index: selectedBoxIndex, - }); + const r = await mutateSubmitStashBoxSceneDraft(input); return r.data?.submitStashBoxSceneDraft; } else if (type === "performer") { - const r = await mutateSubmitStashBoxPerformerDraft({ - id: entity.id, - stash_box_index: selectedBoxIndex, - }); + const r = await mutateSubmitStashBoxPerformerDraft(input); return r.data?.submitStashBoxPerformerDraft; } } diff --git a/ui/v2.5/src/components/FrontPage/Control.tsx b/ui/v2.5/src/components/FrontPage/Control.tsx index 32c4faee962..7ce32cb4449 100644 --- a/ui/v2.5/src/components/FrontPage/Control.tsx +++ b/ui/v2.5/src/components/FrontPage/Control.tsx @@ -7,7 +7,7 @@ import { ConfigurationContext } from "src/hooks/Config"; import { ListFilterModel } from "src/models/list-filter/filter"; import { GalleryRecommendationRow } from "../Galleries/GalleryRecommendationRow"; import { ImageRecommendationRow } from "../Images/ImageRecommendationRow"; -import { MovieRecommendationRow } from "../Movies/MovieRecommendationRow"; +import { GroupRecommendationRow } from "../Groups/GroupRecommendationRow"; import { PerformerRecommendationRow } from "../Performers/PerformerRecommendationRow"; import { SceneRecommendationRow } from "../Scenes/SceneRecommendationRow"; import { StudioRecommendationRow } from "../Studios/StudioRecommendationRow"; @@ -44,8 +44,9 @@ const RecommendationRow: React.FC 
= ({ mode, filter, header }) => { /> ); case GQL.FilterMode.Movies: + case GQL.FilterMode.Groups: return ( - > = ({ + match, +}) => { + const { id, index: indexStr } = match.params; + + let index = parseInt(indexStr); + if (isNaN(index)) { + index = 0; + } + + const { data, loading, error } = useFindGalleryImageID(id, index); + + if (isNaN(index)) { + return ; + } + + if (loading) return ; + if (error) return ; + if (!data?.findGallery) + return ; + + return ; +}; const Galleries: React.FC = () => { useScrollToTopOnMount(); - return ; + return ; }; const GalleryRoutes: React.FC = () => { @@ -22,6 +54,11 @@ const GalleryRoutes: React.FC = () => { + diff --git a/ui/v2.5/src/components/Galleries/GalleryCard.tsx b/ui/v2.5/src/components/Galleries/GalleryCard.tsx index 0c7491dc6f5..130f6ffd54e 100644 --- a/ui/v2.5/src/components/Galleries/GalleryCard.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryCard.tsx @@ -14,6 +14,47 @@ import { faBox, faPlayCircle, faTag } from "@fortawesome/free-solid-svg-icons"; import { galleryTitle } from "src/core/galleries"; import ScreenUtils from "src/utils/screen"; import { StudioOverlay } from "../Shared/GridCard/StudioOverlay"; +import { GalleryPreviewScrubber } from "./GalleryPreviewScrubber"; +import cx from "classnames"; +import { useHistory } from "react-router-dom"; + +interface IScenePreviewProps { + isPortrait?: boolean; + gallery: GQL.SlimGalleryDataFragment; + onScrubberClick?: (index: number) => void; +} + +export const GalleryPreview: React.FC = ({ + gallery, + isPortrait = false, + onScrubberClick, +}) => { + const [imgSrc, setImgSrc] = useState( + gallery.paths.cover ?? undefined + ); + + return ( +
+ {!!imgSrc && ( + {gallery.title + )} + {gallery.image_count > 0 && ( + + )} +
+ ); +}; interface IProps { gallery: GQL.SlimGalleryDataFragment; @@ -25,6 +66,7 @@ interface IProps { } export const GalleryCard: React.FC = (props) => { + const history = useHistory(); const [cardWidth, setCardWidth] = useState(); useEffect(() => { @@ -167,14 +209,13 @@ export const GalleryCard: React.FC = (props) => { linkClassName="gallery-card-header" image={ <> - {props.gallery.cover ? ( - {props.gallery.title - ) : undefined} + { + console.log(i); + history.push(`/galleries/${props.gallery.id}/images/${i}`); + }} + /> } diff --git a/ui/v2.5/src/components/Galleries/GalleryDetails/Gallery.tsx b/ui/v2.5/src/components/Galleries/GalleryDetails/Gallery.tsx index cd06049edf8..7dc2a17b6ac 100644 --- a/ui/v2.5/src/components/Galleries/GalleryDetails/Gallery.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryDetails/Gallery.tsx @@ -11,6 +11,7 @@ import { Helmet } from "react-helmet"; import * as GQL from "src/core/generated-graphql"; import { mutateMetadataScan, + mutateResetGalleryCover, useFindGallery, useGalleryUpdate, } from "src/core/StashService"; @@ -124,6 +125,7 @@ export const GalleryPage: React.FC = ({ gallery, add }) => { await mutateMetadataScan({ paths: [path], + rescan: true, }); Toast.success( @@ -137,6 +139,25 @@ export const GalleryPage: React.FC = ({ gallery, add }) => { ); } + async function onResetCover() { + try { + await mutateResetGalleryCover({ + gallery_id: gallery.id!, + }); + + Toast.success( + intl.formatMessage( + { id: "toast.updated_entity" }, + { + entity: intl.formatMessage({ id: "gallery" }).toLocaleLowerCase(), + } + ) + ); + } catch (e) { + Toast.error(e); + } + } + async function onClickChapter(imageindex: number) { showLightbox(imageindex - 1); } @@ -175,7 +196,6 @@ export const GalleryPage: React.FC = ({ gallery, add }) => { {path ? 
( onRescan()} > @@ -183,12 +203,17 @@ export const GalleryPage: React.FC = ({ gallery, add }) => { ) : undefined} onResetCover()} + > + + + setIsDeleteAlertOpen(true)} > diff --git a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryEditPanel.tsx b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryEditPanel.tsx index 4c12b0232d9..6acefcf7dc3 100644 --- a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryEditPanel.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryEditPanel.tsx @@ -1,14 +1,7 @@ -import React, { useEffect, useState } from "react"; +import React, { useEffect, useMemo, useState } from "react"; import { FormattedMessage, useIntl } from "react-intl"; import { Prompt } from "react-router-dom"; -import { - Button, - Dropdown, - DropdownButton, - Form, - Col, - Row, -} from "react-bootstrap"; +import { Button, Form, Col, Row } from "react-bootstrap"; import Mousetrap from "mousetrap"; import * as GQL from "src/core/generated-graphql"; import * as yup from "yup"; @@ -18,12 +11,10 @@ import { useListGalleryScrapers, mutateReloadScrapers, } from "src/core/StashService"; -import { Icon } from "src/components/Shared/Icon"; import { LoadingIndicator } from "src/components/Shared/LoadingIndicator"; import { useToast } from "src/hooks/Toast"; import { useFormik } from "formik"; import { GalleryScrapeDialog } from "./GalleryScrapeDialog"; -import { faSyncAlt } from "@fortawesome/free-solid-svg-icons"; import isEqual from "lodash-es/isEqual"; import { handleUnsavedChanges } from "src/utils/navigation"; import { @@ -36,9 +27,10 @@ import { yupUniqueStringList, } from "src/utils/yup"; import { formikUtils } from "src/utils/form"; -import { Tag, TagSelect } from "src/components/Tags/TagSelect"; import { Studio, StudioSelect } from "src/components/Studios/StudioSelect"; import { Scene, SceneSelect } from "src/components/Scenes/SceneSelect"; +import { useTagsEdit } from "src/hooks/tagsEdit"; +import { ScraperMenu } from 
"src/components/Shared/ScraperMenu"; interface IProps { gallery: Partial; @@ -58,13 +50,11 @@ export const GalleryEditPanel: React.FC = ({ const [scenes, setScenes] = useState([]); const [performers, setPerformers] = useState([]); - const [tags, setTags] = useState([]); const [studio, setStudio] = useState(null); const isNew = gallery.id === undefined; - const Scrapers = useListGalleryScrapers(); - const [queryableScrapers, setQueryableScrapers] = useState([]); + const scrapers = useListGalleryScrapers(); const [scrapedGallery, setScrapedGallery] = useState(); @@ -110,6 +100,11 @@ export const GalleryEditPanel: React.FC = ({ onSubmit: (values) => onSave(schema.cast(values)), }); + const { tags, updateTagsStateFromScraper, tagsControl } = useTagsEdit( + gallery.tags, + (ids) => formik.setFieldValue("tag_ids", ids) + ); + function onSetScenes(items: Scene[]) { setScenes(items); formik.setFieldValue( @@ -126,14 +121,6 @@ export const GalleryEditPanel: React.FC = ({ ); } - function onSetTags(items: Tag[]) { - setTags(items); - formik.setFieldValue( - "tag_ids", - items.map((item) => item.id) - ); - } - function onSetStudio(item: Studio | null) { setStudio(item); formik.setFieldValue("studio_id", item ? item.id : null); @@ -143,10 +130,6 @@ export const GalleryEditPanel: React.FC = ({ setPerformers(gallery.performers ?? []); }, [gallery.performers]); - useEffect(() => { - setTags(gallery.tags ?? []); - }, [gallery.tags]); - useEffect(() => { setStudio(gallery.studio ?? null); }, [gallery.studio]); @@ -173,13 +156,11 @@ export const GalleryEditPanel: React.FC = ({ } }); - useEffect(() => { - const newQueryableScrapers = (Scrapers?.data?.listScrapers ?? []).filter( - (s) => s.gallery?.supported_scrapes.includes(GQL.ScrapeType.Fragment) + const fragmentScrapers = useMemo(() => { + return (scrapers?.data?.listScrapers ?? 
[]).filter((s) => + s.gallery?.supported_scrapes.includes(GQL.ScrapeType.Fragment) ); - - setQueryableScrapers(newQueryableScrapers); - }, [Scrapers]); + }, [scrapers]); async function onSave(input: InputValues) { setIsLoading(true); @@ -192,12 +173,12 @@ export const GalleryEditPanel: React.FC = ({ setIsLoading(false); } - async function onScrapeClicked(scraper: GQL.Scraper) { + async function onScrapeClicked(s: GQL.ScraperSourceInput) { if (!gallery || !gallery.id) return; setIsLoading(true); try { - const result = await queryScrapeGallery(scraper.id, gallery.id); + const result = await queryScrapeGallery(s.scraper_id!, gallery.id); if (!result.data || !result.data.scrapeSingleGallery?.length) { Toast.success("No galleries found"); return; @@ -252,36 +233,8 @@ export const GalleryEditPanel: React.FC = ({ ); } - function renderScraperMenu() { - if (isNew) { - return; - } - - return ( - - {queryableScrapers.map((s) => ( - onScrapeClicked(s)}> - {s.name} - - ))} - onReloadScrapers()}> - - - - - - - - - ); - } - function urlScrapable(scrapedUrl: string): boolean { - return (Scrapers?.data?.listScrapers ?? []).some((s) => + return (scrapers?.data?.listScrapers ?? []).some((s) => (s?.gallery?.urls ?? []).some((u) => scrapedUrl.includes(u)) ); } @@ -339,23 +292,7 @@ export const GalleryEditPanel: React.FC = ({ } } - if (galleryData?.tags?.length) { - const idTags = galleryData.tags.filter((t) => { - return t.stored_id !== undefined && t.stored_id !== null; - }); - - if (idTags.length > 0) { - onSetTags( - idTags.map((p) => { - return { - id: p.stored_id!, - name: p.name ?? "", - aliases: [], - }; - }) - ); - } - } + updateTagsStateFromScraper(galleryData.tags ?? 
undefined); } async function onScrapeGalleryURL(url: string) { @@ -437,16 +374,7 @@ export const GalleryEditPanel: React.FC = ({ function renderTagsField() { const title = intl.formatMessage({ id: "tags" }); - const control = ( - - ); - - return renderField("tag_ids", title, control, fullWidthProps); + return renderField("tag_ids", title, tagsControl(), fullWidthProps); } function renderDetailsField() { @@ -494,7 +422,16 @@ export const GalleryEditPanel: React.FC = ({ -
{renderScraperMenu()}
+
+ {!isNew && ( + + )} +
diff --git a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryImagesPanel.tsx b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryImagesPanel.tsx index c0c539fa952..1ca450c5e47 100644 --- a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryImagesPanel.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryImagesPanel.tsx @@ -3,15 +3,19 @@ import * as GQL from "src/core/generated-graphql"; import { GalleriesCriterion } from "src/models/list-filter/criteria/galleries"; import { ListFilterModel } from "src/models/list-filter/filter"; import { ImageList } from "src/components/Images/ImageList"; -import { mutateRemoveGalleryImages } from "src/core/StashService"; +import { + mutateRemoveGalleryImages, + mutateSetGalleryCover, +} from "src/core/StashService"; import { showWhenSelected, - PersistanceLevel, + showWhenSingleSelection, } from "src/components/List/ItemList"; import { useToast } from "src/hooks/Toast"; import { useIntl } from "react-intl"; import { faMinus } from "@fortawesome/free-solid-svg-icons"; import { galleryTitle } from "src/core/galleries"; +import { View } from "src/components/List/views"; interface IGalleryDetailsProps { active: boolean; @@ -60,6 +64,35 @@ export const GalleryImagesPanel: React.FC = ({ return filter; } + async function setCover( + result: GQL.FindImagesQueryResult, + filter: ListFilterModel, + selectedIds: Set + ) { + const coverImageID = selectedIds.values().next(); + if (coverImageID.done) { + // operation should only be displayed when exactly one image is selected + return; + } + try { + await mutateSetGalleryCover({ + gallery_id: gallery.id!, + cover_image_id: coverImageID.value, + }); + + Toast.success( + intl.formatMessage( + { id: "toast.updated_entity" }, + { + entity: intl.formatMessage({ id: "gallery" }).toLocaleLowerCase(), + } + ) + ); + } catch (e) { + Toast.error(e); + } + } + async function removeImages( result: GQL.FindImagesQueryResult, filter: ListFilterModel, @@ -87,6 +120,11 @@ export 
const GalleryImagesPanel: React.FC = ({ } const otherOperations = [ + { + text: intl.formatMessage({ id: "actions.set_cover" }), + onClick: setCover, + isDisplayed: showWhenSingleSelection, + }, { text: intl.formatMessage({ id: "actions.remove_from_gallery" }), onClick: removeImages, @@ -102,8 +140,7 @@ export const GalleryImagesPanel: React.FC = ({ filterHook={filterHook} alterQuery={active} extraOperations={otherOperations} - persistState={PersistanceLevel.VIEW} - persistanceKey="galleryimages" + view={View.GalleryImages} chapters={gallery.chapters} /> ); diff --git a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryScrapeDialog.tsx b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryScrapeDialog.tsx index 1daa2f5e756..dd3357fec83 100644 --- a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryScrapeDialog.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryScrapeDialog.tsx @@ -15,18 +15,17 @@ import { import { ScrapedPerformersRow, ScrapedStudioRow, - ScrapedTagsRow, } from "src/components/Shared/ScrapeDialog/ScrapedObjectsRow"; import { sortStoredIdObjects } from "src/utils/data"; import { Performer } from "src/components/Performers/PerformerSelect"; import { useCreateScrapedPerformer, useCreateScrapedStudio, - useCreateScrapedTag, } from "src/components/Shared/ScrapeDialog/createObjects"; import { uniq } from "lodash-es"; import { Tag } from "src/components/Tags/TagSelect"; import { Studio } from "src/components/Studios/StudioSelect"; +import { useScrapedTags } from "src/components/Shared/ScrapeDialog/scrapedTags"; interface IGalleryScrapeDialogProps { gallery: Partial; @@ -99,19 +98,9 @@ export const GalleryScrapeDialog: React.FC = ({ scraped.performers?.filter((t) => !t.stored_id) ?? [] ); - const [tags, setTags] = useState>( - new ObjectListScrapeResult( - sortStoredIdObjects( - galleryTags.map((t) => ({ - stored_id: t.id, - name: t.name, - })) - ), - sortStoredIdObjects(scraped.tags ?? 
undefined) - ) - ); - const [newTags, setNewTags] = useState( - scraped.tags?.filter((t) => !t.stored_id) ?? [] + const { tags, newTags, scrapedTagsRow } = useScrapedTags( + galleryTags, + scraped.tags ); const [details, setDetails] = useState>( @@ -131,13 +120,6 @@ export const GalleryScrapeDialog: React.FC = ({ setNewObjects: setNewPerformers, }); - const createNewTag = useCreateScrapedTag({ - scrapeResult: tags, - setScrapeResult: setTags, - newObjects: newTags, - setNewObjects: setNewTags, - }); - // don't show the dialog if nothing was scraped if ( [ @@ -218,13 +200,7 @@ export const GalleryScrapeDialog: React.FC = ({ newObjects={newPerformers} onCreateNew={createNewPerformer} /> - setTags(value)} - newObjects={newTags} - onCreateNew={createNewTag} - /> + {scrapedTagsRow} ListFilterModel; - persistState?: PersistanceLevel; + view?: View; alterQuery?: boolean; } export const GalleryList: React.FC = ({ filterHook, - persistState, + view, alterQuery, }) => { const intl = useIntl(); @@ -46,6 +40,8 @@ export const GalleryList: React.FC = ({ const [isExportDialogOpen, setIsExportDialogOpen] = useState(false); const [isExportAll, setIsExportAll] = useState(false); + const filterMode = GQL.FilterMode.Galleries; + const otherOperations = [ { text: intl.formatMessage({ id: "actions.view_random" }), @@ -188,17 +184,25 @@ export const GalleryList: React.FC = ({ } return ( - + filterHook={filterHook} + view={view} + selectable + > + + ); }; diff --git a/ui/v2.5/src/components/Galleries/GalleryListTable.tsx b/ui/v2.5/src/components/Galleries/GalleryListTable.tsx index 7e6a561886b..017083b11db 100644 --- a/ui/v2.5/src/components/Galleries/GalleryListTable.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryListTable.tsx @@ -43,14 +43,12 @@ export const GalleryListTable: React.FC = ( return ( - {gallery.cover ? 
( - {title} - ) : undefined} + {title} ); }; @@ -134,6 +132,16 @@ export const GalleryListTable: React.FC = ( ); + const PathCell = (scene: GQL.SlimGalleryDataFragment) => ( +
    + {scene.files.map((file) => ( +
  • + {file.path} +
  • + ))} +
+ ); + interface IColumnSpec { value: string; label: string; @@ -211,6 +219,11 @@ export const GalleryListTable: React.FC = ( label: intl.formatMessage({ id: "photographer" }), render: (s) => <>{s.photographer}, }, + { + value: "path", + label: intl.formatMessage({ id: "path" }), + render: PathCell, + }, ]; const defaultColumns = allColumns diff --git a/ui/v2.5/src/components/Galleries/GalleryPreviewScrubber.tsx b/ui/v2.5/src/components/Galleries/GalleryPreviewScrubber.tsx new file mode 100644 index 00000000000..6bc10274ada --- /dev/null +++ b/ui/v2.5/src/components/Galleries/GalleryPreviewScrubber.tsx @@ -0,0 +1,54 @@ +import React, { useEffect, useState } from "react"; +import { useThrottle } from "src/hooks/throttle"; +import { HoverScrubber } from "../Shared/HoverScrubber"; +import cx from "classnames"; + +export const GalleryPreviewScrubber: React.FC<{ + className?: string; + previewPath: string; + defaultPath: string; + imageCount: number; + onClick?: (imageIndex: number) => void; + onPathChanged: React.Dispatch>; +}> = ({ + className, + previewPath, + defaultPath, + imageCount, + onClick, + onPathChanged, +}) => { + const [activeIndex, setActiveIndex] = useState(); + const debounceSetActiveIndex = useThrottle(setActiveIndex, 50); + + function onScrubberClick() { + if (activeIndex === undefined || !onClick) { + return; + } + + onClick(activeIndex); + } + + useEffect(() => { + function getPath() { + if (activeIndex === undefined) { + return defaultPath; + } + + return `${previewPath}/${activeIndex}`; + } + + onPathChanged(getPath()); + }, [activeIndex, defaultPath, previewPath, onPathChanged]); + + return ( +
+ debounceSetActiveIndex(i)} + onClick={() => onScrubberClick()} + /> +
+ ); +}; diff --git a/ui/v2.5/src/components/Galleries/GalleryWallCard.tsx b/ui/v2.5/src/components/Galleries/GalleryWallCard.tsx index 82b2def9261..f38103d53c4 100644 --- a/ui/v2.5/src/components/Galleries/GalleryWallCard.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryWallCard.tsx @@ -19,17 +19,20 @@ interface IProps { const GalleryWallCard: React.FC = ({ gallery }) => { const intl = useIntl(); + const [orientation, setOrientation] = React.useState< + "landscape" | "portrait" + >("landscape"); const showLightbox = useGalleryLightbox(gallery.id, gallery.chapters); - const coverFile = gallery?.cover?.files.length - ? gallery.cover.files[0] - : undefined; + const cover = gallery?.paths.cover; + + function onImageLoad(e: React.SyntheticEvent) { + const target = e.target as HTMLImageElement; + setOrientation( + target.naturalWidth > target.naturalHeight ? "landscape" : "portrait" + ); + } - const orientation = - (coverFile?.width ?? 0) > (coverFile?.height ?? 0) - ? "landscape" - : "portrait"; - const cover = gallery?.cover?.paths.thumbnail ?? ""; const title = galleryTitle(gallery); const performerNames = gallery.performers.map((p) => p.name); const performers = @@ -38,6 +41,10 @@ const GalleryWallCard: React.FC = ({ gallery }) => { : performerNames; async function showLightboxStart() { + if (gallery.image_count === 0) { + return; + } + showLightbox(0); } @@ -51,7 +58,13 @@ const GalleryWallCard: React.FC = ({ gallery }) => { tabIndex={0} > - +