diff --git a/.github/workflows/golangci-lint.yml b/.github/workflows/golangci-lint.yml index 3d15f0c480f..d3444c34f0d 100644 --- a/.github/workflows/golangci-lint.yml +++ b/.github/workflows/golangci-lint.yml @@ -33,7 +33,7 @@ jobs: run: docker exec -t build /bin/bash -c "make generate-backend" - name: Run golangci-lint - uses: golangci/golangci-lint-action@v2 + uses: golangci/golangci-lint-action@v3 with: # Optional: version of golangci-lint to use in form of v1.2 or v1.2.3 or `latest` to use the latest version version: latest @@ -42,19 +42,26 @@ jobs: # working-directory: somedir # Optional: golangci-lint command line arguments. - args: --modules-download-mode=vendor --timeout=5m + # + # Note: By default, the `.golangci.yml` file should be at the root of the repository. + # The location of the configuration file can be changed by using `--config=` + args: --timeout=5m # Optional: show only new issues if it's a pull request. The default value is `false`. # only-new-issues: true - # Optional: if set to true then the action will use pre-installed Go. - # skip-go-installation: true + # Optional: if set to true, then all caching functionality will be completely disabled, + # takes precedence over all other caching options. + # skip-cache: true - # Optional: if set to true then the action don't cache or restore ~/go/pkg. - skip-pkg-cache: true + # Optional: if set to true, then the action won't cache or restore ~/go/pkg. + # skip-pkg-cache: true - # Optional: if set to true then the action don't cache or restore ~/.cache/go-build. - skip-build-cache: true + # Optional: if set to true, then the action won't cache or restore ~/.cache/go-build. + # skip-build-cache: true + + # Optional: The mode to install golangci-lint. It can be 'binary' or 'goinstall'. 
+ # install-mode: "goinstall" - name: Cleanup build container run: docker rm -f -v build diff --git a/.gitignore b/.gitignore index ead0b09f953..2a259b6adfd 100644 --- a/.gitignore +++ b/.gitignore @@ -2,6 +2,9 @@ # Go #### +# Vendored dependencies +vendor + # Binaries for programs and plugins *.exe *.exe~ diff --git a/.golangci.yml b/.golangci.yml index 43f7324a0ca..48ca4fd75a7 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -1,7 +1,6 @@ # options for analysis running run: timeout: 5m - modules-download-mode: vendor linters: disable-all: true diff --git a/.mockery.yml b/.mockery.yml new file mode 100644 index 00000000000..0dcb289e3d7 --- /dev/null +++ b/.mockery.yml @@ -0,0 +1,4 @@ +dir: ./pkg/models +name: ".*ReaderWriter" +outpkg: mocks +output: ./pkg/models/mocks diff --git a/Makefile b/Makefile index 70154258553..0f6a43cdffb 100644 --- a/Makefile +++ b/Makefile @@ -319,7 +319,7 @@ it: # generates test mocks .PHONY: generate-test-mocks generate-test-mocks: - go run github.com/vektra/mockery/v2 --dir ./pkg/models --name '.*ReaderWriter' --outpkg mocks --output ./pkg/models/mocks + go run github.com/vektra/mockery/v2 # runs server # sets the config file to use the local dev config diff --git a/README.md b/README.md index 4debcfa9169..9cc62f9b970 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,4 @@ # Stash -https://stashapp.cc [![Build](https://github.com/stashapp/stash/actions/workflows/build.yml/badge.svg?branch=develop&event=push)](https://github.com/stashapp/stash/actions/workflows/build.yml) [![Docker pulls](https://img.shields.io/docker/pulls/stashapp/stash.svg)](https://hub.docker.com/r/stashapp/stash 'DockerHub') @@ -21,7 +20,7 @@ https://stashapp.cc You can [watch a SFW demo video](https://vimeo.com/545323354) to see it in action. -For further information you can [read the in-app manual](ui/v2.5/src/docs/en). +For further information you can consult the [documentation](https://docs.stashapp.cc) or [read the in-app manual](ui/v2.5/src/docs/en). 
# Installing Stash @@ -49,10 +48,10 @@ Many community-maintained scrapers are available for download from [CommunityScr [StashDB](http://stashdb.org) is the canonical instance of our open source metadata API, [stash-box](https://github.com/stashapp/stash-box). # Translation -[![Translate](https://translate.stashapp.cc/widgets/stash/-/stash-desktop-client/svg-badge.svg)](https://translate.stashapp.cc/engage/stash/) +[![Translate](https://hosted.weblate.org/widget/stashapp/stash/svg-badge.svg)](https://hosted.weblate.org/engage/stashapp/) 🇧🇷 🇨🇳 🇩🇰 🇳🇱 🇬🇧 🇪🇪 🇫🇮 🇫🇷 🇩🇪 🇮🇹 🇯🇵 🇰🇷 🇵🇱 🇷🇺 🇪🇸 🇸🇪 🇹🇼 🇹🇷 -Stash is available in 25 languages (so far!) and it could be in your language too. If you want to help us translate Stash into your language, you can make an account at [translate.stashapp.cc](https://translate.stashapp.cc/projects/stash/stash-desktop-client/) to get started contributing new languages or improving existing ones. Thanks! +Stash is available in 25 languages (so far!) and it could be in your language too. We use Weblate to coordinate community translations. If you want to help us translate Stash into your language, you can make an account at [Stash's Weblate](https://hosted.weblate.org/projects/stashapp/stash/) to get started contributing new languages or improving existing ones. Thanks! # Support (FAQ) diff --git a/cmd/phasher/main.go b/cmd/phasher/main.go index f4648b74e2f..99a7491929f 100644 --- a/cmd/phasher/main.go +++ b/cmd/phasher/main.go @@ -8,8 +8,8 @@ import ( flag "github.com/spf13/pflag" "github.com/stashapp/stash/pkg/ffmpeg" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/hash/videophash" + "github.com/stashapp/stash/pkg/models" ) func customUsage() { @@ -28,8 +28,8 @@ func printPhash(ff *ffmpeg.FFMpeg, ffp ffmpeg.FFProbe, inputfile string, quiet * // videoFile.Path (from BaseFile) // videoFile.Duration // The rest of the struct isn't needed. 
- vf := &file.VideoFile{ - BaseFile: &file.BaseFile{Path: inputfile}, + vf := &models.VideoFile{ + BaseFile: &models.BaseFile{Path: inputfile}, Duration: ffvideoFile.FileDuration, } diff --git a/cmd/stash/main.go b/cmd/stash/main.go index 4aadf4fb1d8..0fbdf2108ad 100644 --- a/cmd/stash/main.go +++ b/cmd/stash/main.go @@ -1,4 +1,4 @@ -//go:generate go run -mod=vendor github.com/99designs/gqlgen +//go:generate go run github.com/99designs/gqlgen package main import ( diff --git a/docker/build/x86_64/Dockerfile b/docker/build/x86_64/Dockerfile index 554c6ff9977..174d6f022fb 100644 --- a/docker/build/x86_64/Dockerfile +++ b/docker/build/x86_64/Dockerfile @@ -21,7 +21,6 @@ RUN apk add --no-cache make alpine-sdk WORKDIR /stash COPY ./go* ./*.go Makefile gqlgen.yml .gqlgenc.yml /stash/ COPY ./scripts /stash/scripts/ -COPY ./vendor /stash/vendor/ COPY ./pkg /stash/pkg/ COPY ./cmd /stash/cmd COPY ./internal /stash/internal diff --git a/docker/build/x86_64/Dockerfile-CUDA b/docker/build/x86_64/Dockerfile-CUDA index 63ecf3d75bb..8195f2324c9 100644 --- a/docker/build/x86_64/Dockerfile-CUDA +++ b/docker/build/x86_64/Dockerfile-CUDA @@ -21,7 +21,6 @@ RUN apt update && apt install -y build-essential golang WORKDIR /stash COPY ./go* ./*.go Makefile gqlgen.yml .gqlgenc.yml /stash/ COPY ./scripts /stash/scripts/ -COPY ./vendor /stash/vendor/ COPY ./pkg /stash/pkg/ COPY ./cmd /stash/cmd COPY ./internal /stash/internal diff --git a/go.mod b/go.mod index bb05736f6c4..5b82dcc4aec 100644 --- a/go.mod +++ b/go.mod @@ -4,7 +4,7 @@ require ( github.com/99designs/gqlgen v0.17.2 github.com/Yamashou/gqlgenc v0.0.6 github.com/anacrolix/dms v1.2.2 - github.com/antchfx/htmlquery v1.2.5-0.20211125074323-810ee8082758 + github.com/antchfx/htmlquery v1.3.0 github.com/chromedp/cdproto v0.0.0-20210622022015-fe1827b46b84 github.com/chromedp/chromedp v0.7.3 github.com/corona10/goimagehash v1.0.3 @@ -66,7 +66,7 @@ require ( require ( github.com/agnivade/levenshtein v1.1.1 // indirect - 
github.com/antchfx/xpath v1.2.0 // indirect + github.com/antchfx/xpath v1.2.3 // indirect github.com/asticode/go-astikit v0.20.0 // indirect github.com/asticode/go-astits v1.8.0 // indirect github.com/chromedp/sysutil v1.0.0 // indirect diff --git a/go.sum b/go.sum index b9524d0cbc1..e06dae76834 100644 --- a/go.sum +++ b/go.sum @@ -94,10 +94,10 @@ github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883 h1:bvNMNQO63//z+xNg github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8= github.com/andybalholm/brotli v1.0.3 h1:fpcw+r1N1h0Poc1F/pHbW40cUm/lMEQslZtCkBQ0UnM= github.com/andybalholm/brotli v1.0.3/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig= -github.com/antchfx/htmlquery v1.2.5-0.20211125074323-810ee8082758 h1:Ldjwcl7T8VqCKgQQ0TfPI8fNb8O/GtMXcYaHlqOu99s= -github.com/antchfx/htmlquery v1.2.5-0.20211125074323-810ee8082758/go.mod h1:2xO6iu3EVWs7R2JYqBbp8YzG50gj/ofqs5/0VZoDZLc= -github.com/antchfx/xpath v1.2.0 h1:mbwv7co+x0RwgeGAOHdrKy89GvHaGvxxBtPK0uF9Zr8= -github.com/antchfx/xpath v1.2.0/go.mod h1:i54GszH55fYfBmoZXapTHN8T8tkcHfRgLyVwwqzXNcs= +github.com/antchfx/htmlquery v1.3.0 h1:5I5yNFOVI+egyia5F2s/5Do2nFWxJz41Tr3DyfKD25E= +github.com/antchfx/htmlquery v1.3.0/go.mod h1:zKPDVTMhfOmcwxheXUsx4rKJy8KEY/PU6eXr/2SebQ8= +github.com/antchfx/xpath v1.2.3 h1:CCZWOzv5bAqjVv0offZ2LVgVYFbeldKQVuLNbViZdes= +github.com/antchfx/xpath v1.2.3/go.mod h1:i54GszH55fYfBmoZXapTHN8T8tkcHfRgLyVwwqzXNcs= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= github.com/apache/arrow/go/arrow v0.0.0-20200601151325-b2287a20f230/go.mod h1:QNYViu/X0HXDHw7m3KXzWSVXIbfUvJqBFe6Gj8/pYA0= github.com/apache/arrow/go/arrow v0.0.0-20210521153258-78c88a9f517b/go.mod h1:R4hW3Ug0s+n4CUsWHKOj00Pu01ZqU4x/hSF5kXUcXKQ= @@ -909,7 +909,6 @@ golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLL golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod 
h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200421231249-e086a090c8fd/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= @@ -935,6 +934,7 @@ golang.org/x/net v0.0.0-20210813160813-60bc85c4be6d/go.mod h1:9nx3DQGgdP8bBQD5qx golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.5.0/go.mod h1:DivGGAXEgPSlEBzxGzZI+ZLohi+xUj054jfeKui00ws= golang.org/x/net v0.7.0 h1:rJrUqqhjsgNp7KqAIc25s9pZnjU7TUcSY7HcVZjdn1g= golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/oauth2 v0.0.0-20180227000427-d7d64896b5ff/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= @@ -1064,10 +1064,12 @@ golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220319134239-a9b59b0215f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0 
h1:MUK/U/4lj1t1oPg0HfuXDN/Z1wv31ZJ/YcPiGccS4DU= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.4.0/go.mod h1:9P2UbLfCdcvo3p/nzKvsmas4TnlujnuoV9hGgYzW1lQ= golang.org/x/term v0.5.0 h1:n2a8QNdAb0sZNpU9R1ALUXBbY+w51fCQDN+7EdxNBsY= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -1079,6 +1081,7 @@ golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.7.0 h1:4BRB4x83lYWy72KwLD/qYDuTu7q9PjSagHvijDw7cLo= golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= diff --git a/gqlgen.yml b/gqlgen.yml index 2439ebc7ca0..ec9feab24a6 100644 --- a/gqlgen.yml +++ b/gqlgen.yml @@ -23,6 +23,12 @@ autobind: models: # Scalars + ID: + model: + - github.com/99designs/gqlgen/graphql.ID + - github.com/99designs/gqlgen/graphql.IntID + - github.com/stashapp/stash/pkg/models.FileID + - github.com/stashapp/stash/pkg/models.FolderID Int64: model: github.com/99designs/gqlgen/graphql.Int64 Timestamp: @@ -33,6 +39,30 @@ models: fields: title: resolver: true + # override models, from internal/api/models.go + BaseFile: + model: github.com/stashapp/stash/internal/api.BaseFile + GalleryFile: + model: 
github.com/stashapp/stash/internal/api.GalleryFile + fields: + # override fingerprint field + fingerprints: + fieldName: FingerprintSlice + VideoFile: + fields: + # override fingerprint field + fingerprints: + fieldName: FingerprintSlice + # override float fields - #1572 + duration: + fieldName: DurationFinite + frame_rate: + fieldName: FrameRateFinite + ImageFile: + fields: + # override fingerprint field + fingerprints: + fieldName: FingerprintSlice # autobind on config causes generation issues BlobsStorageType: model: github.com/stashapp/stash/internal/manager/config.BlobsStorageType @@ -118,4 +148,6 @@ models: model: github.com/stashapp/stash/internal/identify.MetadataOptions ScraperSourceInput: model: github.com/stashapp/stash/pkg/scraper.Source + SavedFindFilterType: + model: github.com/stashapp/stash/pkg/models.FindFilterType diff --git a/graphql/documents/data/filter.graphql b/graphql/documents/data/filter.graphql index 4c6236668ad..1ced5a44198 100644 --- a/graphql/documents/data/filter.graphql +++ b/graphql/documents/data/filter.graphql @@ -2,5 +2,13 @@ fragment SavedFilterData on SavedFilter { id mode name - filter + find_filter { + q + page + per_page + sort + direction + } + object_filter + ui_options } diff --git a/graphql/documents/data/image-slim.graphql b/graphql/documents/data/image-slim.graphql index 9f84904dcfe..1c7784c9ede 100644 --- a/graphql/documents/data/image-slim.graphql +++ b/graphql/documents/data/image-slim.graphql @@ -2,7 +2,7 @@ fragment SlimImageData on Image { id title date - url + urls rating100 organized o_counter diff --git a/graphql/documents/data/image.graphql b/graphql/documents/data/image.graphql index d55a8108121..64c801401e7 100644 --- a/graphql/documents/data/image.graphql +++ b/graphql/documents/data/image.graphql @@ -3,7 +3,7 @@ fragment ImageData on Image { title rating100 date - url + urls organized o_counter created_at diff --git a/graphql/documents/data/performer-slim.graphql 
b/graphql/documents/data/performer-slim.graphql index 65019b98b52..5fbd1a2eb6d 100644 --- a/graphql/documents/data/performer-slim.graphql +++ b/graphql/documents/data/performer-slim.graphql @@ -34,3 +34,10 @@ fragment SlimPerformerData on Performer { death_date weight } + +fragment SelectPerformerData on Performer { + id + name + disambiguation + alias_list +} diff --git a/graphql/documents/data/scene-marker.graphql b/graphql/documents/data/scene-marker.graphql index 61439bd1e80..9fd0c7d3ded 100644 --- a/graphql/documents/data/scene-marker.graphql +++ b/graphql/documents/data/scene-marker.graphql @@ -13,12 +13,10 @@ fragment SceneMarkerData on SceneMarker { primary_tag { id name - aliases } tags { id name - aliases } } diff --git a/graphql/documents/data/tag-slim.graphql b/graphql/documents/data/tag-slim.graphql index 26b7c277a5b..e35660de624 100644 --- a/graphql/documents/data/tag-slim.graphql +++ b/graphql/documents/data/tag-slim.graphql @@ -3,4 +3,6 @@ fragment SlimTagData on Tag { name aliases image_path + parent_count + child_count } diff --git a/graphql/documents/queries/performer.graphql b/graphql/documents/queries/performer.graphql index cc25752ac4a..3c3f689c326 100644 --- a/graphql/documents/queries/performer.graphql +++ b/graphql/documents/queries/performer.graphql @@ -1,8 +1,13 @@ query FindPerformers( $filter: FindFilterType $performer_filter: PerformerFilterType + $performer_ids: [Int!] ) { - findPerformers(filter: $filter, performer_filter: $performer_filter) { + findPerformers( + filter: $filter + performer_filter: $performer_filter + performer_ids: $performer_ids + ) { count performers { ...PerformerData @@ -15,3 +20,20 @@ query FindPerformer($id: ID!) { ...PerformerData } } + +query FindPerformersForSelect( + $filter: FindFilterType + $performer_filter: PerformerFilterType + $performer_ids: [Int!] 
+) { + findPerformers( + filter: $filter + performer_filter: $performer_filter + performer_ids: $performer_ids + ) { + count + performers { + ...SelectPerformerData + } + } +} diff --git a/graphql/schema/schema.graphql b/graphql/schema/schema.graphql index 52f97adab31..4c011ad0db2 100644 --- a/graphql/schema/schema.graphql +++ b/graphql/schema/schema.graphql @@ -60,6 +60,7 @@ type Query { findPerformers( performer_filter: PerformerFilterType filter: FindFilterType + performer_ids: [Int!] ): FindPerformersResultType! "Find a studio by ID" @@ -223,11 +224,13 @@ type Query { allSceneMarkers: [SceneMarker!]! allImages: [Image!]! allGalleries: [Gallery!]! - allPerformers: [Performer!]! allStudios: [Studio!]! allMovies: [Movie!]! allTags: [Tag!]! + # @deprecated + allPerformers: [Performer!]! + # Get everything with minimal metadata # Version diff --git a/graphql/schema/types/filters.graphql b/graphql/schema/types/filters.graphql index 13165fba875..f0b19026471 100644 --- a/graphql/schema/types/filters.graphql +++ b/graphql/schema/types/filters.graphql @@ -12,6 +12,17 @@ input FindFilterType { direction: SortDirectionEnum } +type SavedFindFilterType { + q: String + page: Int + """ + use per_page = -1 to indicate all results. Defaults to 25. + """ + per_page: Int + sort: String + direction: SortDirectionEnum +} + enum ResolutionEnum { "144p" VERY_LOW @@ -604,6 +615,13 @@ type SavedFilter { name: String! "JSON-encoded filter string" filter: String! + @deprecated(reason: "use find_filter and object_filter instead") + find_filter: SavedFindFilterType + # maps to any of the AnyFilterInput types + # using a generic Map instead of creating and maintaining match types for inputs + object_filter: Map + # generic map for ui options + ui_options: Map } input SaveFilterInput { @@ -611,8 +629,10 @@ input SaveFilterInput { id: ID mode: FilterMode! name: String! - "JSON-encoded filter string" - filter: String! 
+ find_filter: FindFilterType + object_filter: Map + # generic map for ui options + ui_options: Map } input DestroyFilterInput { @@ -621,6 +641,9 @@ input DestroyFilterInput { input SetDefaultFilterInput { mode: FilterMode! - "JSON-encoded filter string - null to clear" - filter: String + "null to clear" + find_filter: FindFilterType + object_filter: Map + # generic map for ui options + ui_options: Map } diff --git a/graphql/schema/types/image.graphql b/graphql/schema/types/image.graphql index 5d13cbdd6e4..f0307b962ae 100644 --- a/graphql/schema/types/image.graphql +++ b/graphql/schema/types/image.graphql @@ -6,7 +6,8 @@ type Image { rating: Int @deprecated(reason: "Use 1-100 range with rating100") # rating expressed as 1-100 rating100: Int - url: String + url: String @deprecated(reason: "Use urls") + urls: [String!]! date: String o_counter: Int organized: Boolean! @@ -48,7 +49,8 @@ input ImageUpdateInput { # rating expressed as 1-100 rating100: Int organized: Boolean - url: String + url: String @deprecated(reason: "Use urls") + urls: [String!] date: String studio_id: ID @@ -68,7 +70,8 @@ input BulkImageUpdateInput { # rating expressed as 1-100 rating100: Int organized: Boolean - url: String + url: String @deprecated(reason: "Use urls") + urls: BulkUpdateStrings date: String studio_id: ID diff --git a/graphql/schema/types/scene.graphql b/graphql/schema/types/scene.graphql index 6161ff27ff4..8a171439983 100644 --- a/graphql/schema/types/scene.graphql +++ b/graphql/schema/types/scene.graphql @@ -41,7 +41,7 @@ type Scene { details: String director: String url: String @deprecated(reason: "Use urls") - urls: [String!] + urls: [String!]! 
date: String # rating expressed as 1-5 rating: Int @deprecated(reason: "Use 1-100 range with rating100") diff --git a/graphql/schema/types/tag.graphql b/graphql/schema/types/tag.graphql index 6260856572c..eba9b1996ef 100644 --- a/graphql/schema/types/tag.graphql +++ b/graphql/schema/types/tag.graphql @@ -15,6 +15,9 @@ type Tag { performer_count(depth: Int): Int! # Resolver parents: [Tag!]! children: [Tag!]! + + parent_count: Int! # Resolver + child_count: Int! # Resolver } input TagCreateInput { diff --git a/internal/api/changeset_translator.go b/internal/api/changeset_translator.go index e40b8fe0e48..412f12db99e 100644 --- a/internal/api/changeset_translator.go +++ b/internal/api/changeset_translator.go @@ -7,7 +7,9 @@ import ( "strings" "github.com/99designs/gqlgen/graphql" + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/sliceutil/stringslice" ) const updateInputField = "input" @@ -91,7 +93,7 @@ func (t changesetTranslator) getFields() []string { return ret } -func (t changesetTranslator) string(value *string, field string) string { +func (t changesetTranslator) string(value *string) string { if value == nil { return "" } @@ -127,7 +129,7 @@ func (t changesetTranslator) optionalDate(value *string, field string) (models.O return models.NewOptionalDate(date), nil } -func (t changesetTranslator) datePtr(value *string, field string) (*models.Date, error) { +func (t changesetTranslator) datePtr(value *string) (*models.Date, error) { if value == nil || *value == "" { return nil, nil } @@ -139,7 +141,7 @@ func (t changesetTranslator) datePtr(value *string, field string) (*models.Date, return &date, nil } -func (t changesetTranslator) intPtrFromString(value *string, field string) (*int, error) { +func (t changesetTranslator) intPtrFromString(value *string) (*int, error) { if value == nil || *value == "" { return nil, nil } @@ -151,35 +153,35 @@ func (t changesetTranslator) intPtrFromString(value *string, field string) (*int return &vv, nil } 
-func (t changesetTranslator) ratingConversionInt(legacyValue *int, rating100Value *int) *int { +func (t changesetTranslator) ratingConversion(legacyValue *int, rating100Value *int) *int { const ( legacyField = "rating" rating100Field = "rating100" ) legacyRating := t.optionalInt(legacyValue, legacyField) - if legacyRating.Set && !(legacyRating.Null) { - ret := int(models.Rating5To100(int(legacyRating.Value))) + if legacyRating.Set && !legacyRating.Null { + ret := models.Rating5To100(legacyRating.Value) return &ret } o := t.optionalInt(rating100Value, rating100Field) - if o.Set && !(o.Null) { + if o.Set && !o.Null { return &o.Value } return nil } -func (t changesetTranslator) ratingConversionOptional(legacyValue *int, rating100Value *int) models.OptionalInt { +func (t changesetTranslator) optionalRatingConversion(legacyValue *int, rating100Value *int) models.OptionalInt { const ( legacyField = "rating" rating100Field = "rating100" ) legacyRating := t.optionalInt(legacyValue, legacyField) - if legacyRating.Set && !(legacyRating.Null) { - legacyRating.Value = int(models.Rating5To100(int(legacyRating.Value))) + if legacyRating.Set && !legacyRating.Null { + legacyRating.Value = models.Rating5To100(legacyRating.Value) return legacyRating } return t.optionalInt(rating100Value, rating100Field) @@ -212,7 +214,7 @@ func (t changesetTranslator) optionalIntFromString(value *string, field string) return models.NewOptionalInt(vv), nil } -func (t changesetTranslator) bool(value *bool, field string) bool { +func (t changesetTranslator) bool(value *bool) bool { if value == nil { return false } @@ -235,3 +237,191 @@ func (t changesetTranslator) optionalFloat64(value *float64, field string) model return models.NewOptionalFloat64Ptr(value) } + +func (t changesetTranslator) fileIDPtrFromString(value *string) (*models.FileID, error) { + if value == nil || *value == "" { + return nil, nil + } + + vv, err := strconv.Atoi(*value) + if err != nil { + return nil, fmt.Errorf("converting %v 
to int: %w", *value, err) + } + + id := models.FileID(vv) + return &id, nil +} + +func (t changesetTranslator) fileIDSliceFromStringSlice(value []string) ([]models.FileID, error) { + ints, err := stringslice.StringSliceToIntSlice(value) + if err != nil { + return nil, err + } + + fileIDs := make([]models.FileID, len(ints)) + for i, v := range ints { + fileIDs[i] = models.FileID(v) + } + + return fileIDs, nil +} + +func (t changesetTranslator) relatedIds(value []string) (models.RelatedIDs, error) { + ids, err := stringslice.StringSliceToIntSlice(value) + if err != nil { + return models.RelatedIDs{}, err + } + + return models.NewRelatedIDs(ids), nil +} + +func (t changesetTranslator) updateIds(value []string, field string) (*models.UpdateIDs, error) { + if !t.hasField(field) { + return nil, nil + } + + ids, err := stringslice.StringSliceToIntSlice(value) + if err != nil { + return nil, err + } + + return &models.UpdateIDs{ + IDs: ids, + Mode: models.RelationshipUpdateModeSet, + }, nil +} + +func (t changesetTranslator) updateIdsBulk(value *BulkUpdateIds, field string) (*models.UpdateIDs, error) { + if !t.hasField(field) || value == nil { + return nil, nil + } + + ids, err := stringslice.StringSliceToIntSlice(value.Ids) + if err != nil { + return nil, fmt.Errorf("converting ids [%v]: %w", value.Ids, err) + } + + return &models.UpdateIDs{ + IDs: ids, + Mode: value.Mode, + }, nil +} + +func (t changesetTranslator) optionalURLs(value []string, legacyValue *string) *models.UpdateStrings { + const ( + legacyField = "url" + field = "urls" + ) + + // prefer urls over url + if t.hasField(field) { + return t.updateStrings(value, field) + } else if t.hasField(legacyField) { + var valueSlice []string + if legacyValue != nil { + valueSlice = []string{*legacyValue} + } + return t.updateStrings(valueSlice, legacyField) + } + + return nil +} + +func (t changesetTranslator) optionalURLsBulk(value *BulkUpdateStrings, legacyValue *string) *models.UpdateStrings { + const ( + legacyField 
= "url" + field = "urls" + ) + + // prefer urls over url + if t.hasField("urls") { + return t.updateStringsBulk(value, field) + } else if t.hasField(legacyField) { + var valueSlice []string + if legacyValue != nil { + valueSlice = []string{*legacyValue} + } + return t.updateStrings(valueSlice, legacyField) + } + + return nil +} + +func (t changesetTranslator) updateStrings(value []string, field string) *models.UpdateStrings { + if !t.hasField(field) { + return nil + } + + return &models.UpdateStrings{ + Values: value, + Mode: models.RelationshipUpdateModeSet, + } +} + +func (t changesetTranslator) updateStringsBulk(value *BulkUpdateStrings, field string) *models.UpdateStrings { + if !t.hasField(field) || value == nil { + return nil + } + + return &models.UpdateStrings{ + Values: value.Values, + Mode: value.Mode, + } +} + +func (t changesetTranslator) updateStashIDs(value []models.StashID, field string) *models.UpdateStashIDs { + if !t.hasField(field) { + return nil + } + + return &models.UpdateStashIDs{ + StashIDs: value, + Mode: models.RelationshipUpdateModeSet, + } +} + +func (t changesetTranslator) relatedMovies(value []models.SceneMovieInput) (models.RelatedMovies, error) { + moviesScenes, err := models.MoviesScenesFromInput(value) + if err != nil { + return models.RelatedMovies{}, err + } + + return models.NewRelatedMovies(moviesScenes), nil +} + +func (t changesetTranslator) updateMovieIDs(value []models.SceneMovieInput, field string) (*models.UpdateMovieIDs, error) { + if !t.hasField(field) { + return nil, nil + } + + moviesScenes, err := models.MoviesScenesFromInput(value) + if err != nil { + return nil, err + } + + return &models.UpdateMovieIDs{ + Movies: moviesScenes, + Mode: models.RelationshipUpdateModeSet, + }, nil +} + +func (t changesetTranslator) updateMovieIDsBulk(value *BulkUpdateIds, field string) (*models.UpdateMovieIDs, error) { + if !t.hasField(field) || value == nil { + return nil, nil + } + + ids, err := 
stringslice.StringSliceToIntSlice(value.Ids) + if err != nil { + return nil, fmt.Errorf("converting ids [%v]: %w", value.Ids, err) + } + + movies := make([]models.MoviesScenes, len(ids)) + for i, id := range ids { + movies[i] = models.MoviesScenes{MovieID: id} + } + + return &models.UpdateMovieIDs{ + Movies: movies, + Mode: value.Mode, + }, nil +} diff --git a/internal/api/loaders/dataloaders.go b/internal/api/loaders/dataloaders.go index 30b865632b9..d98c663a146 100644 --- a/internal/api/loaders/dataloaders.go +++ b/internal/api/loaders/dataloaders.go @@ -1,14 +1,14 @@ -//go:generate go run -mod=vendor github.com/vektah/dataloaden SceneLoader int *github.com/stashapp/stash/pkg/models.Scene -//go:generate go run -mod=vendor github.com/vektah/dataloaden GalleryLoader int *github.com/stashapp/stash/pkg/models.Gallery -//go:generate go run -mod=vendor github.com/vektah/dataloaden ImageLoader int *github.com/stashapp/stash/pkg/models.Image -//go:generate go run -mod=vendor github.com/vektah/dataloaden PerformerLoader int *github.com/stashapp/stash/pkg/models.Performer -//go:generate go run -mod=vendor github.com/vektah/dataloaden StudioLoader int *github.com/stashapp/stash/pkg/models.Studio -//go:generate go run -mod=vendor github.com/vektah/dataloaden TagLoader int *github.com/stashapp/stash/pkg/models.Tag -//go:generate go run -mod=vendor github.com/vektah/dataloaden MovieLoader int *github.com/stashapp/stash/pkg/models.Movie -//go:generate go run -mod=vendor github.com/vektah/dataloaden FileLoader github.com/stashapp/stash/pkg/file.ID github.com/stashapp/stash/pkg/file.File -//go:generate go run -mod=vendor github.com/vektah/dataloaden SceneFileIDsLoader int []github.com/stashapp/stash/pkg/file.ID -//go:generate go run -mod=vendor github.com/vektah/dataloaden ImageFileIDsLoader int []github.com/stashapp/stash/pkg/file.ID -//go:generate go run -mod=vendor github.com/vektah/dataloaden GalleryFileIDsLoader int []github.com/stashapp/stash/pkg/file.ID +//go:generate go 
run github.com/vektah/dataloaden SceneLoader int *github.com/stashapp/stash/pkg/models.Scene +//go:generate go run github.com/vektah/dataloaden GalleryLoader int *github.com/stashapp/stash/pkg/models.Gallery +//go:generate go run github.com/vektah/dataloaden ImageLoader int *github.com/stashapp/stash/pkg/models.Image +//go:generate go run github.com/vektah/dataloaden PerformerLoader int *github.com/stashapp/stash/pkg/models.Performer +//go:generate go run github.com/vektah/dataloaden StudioLoader int *github.com/stashapp/stash/pkg/models.Studio +//go:generate go run github.com/vektah/dataloaden TagLoader int *github.com/stashapp/stash/pkg/models.Tag +//go:generate go run github.com/vektah/dataloaden MovieLoader int *github.com/stashapp/stash/pkg/models.Movie +//go:generate go run github.com/vektah/dataloaden FileLoader github.com/stashapp/stash/pkg/models.FileID github.com/stashapp/stash/pkg/models.File +//go:generate go run github.com/vektah/dataloaden SceneFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID +//go:generate go run github.com/vektah/dataloaden ImageFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID +//go:generate go run github.com/vektah/dataloaden GalleryFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID package loaders @@ -18,7 +18,6 @@ import ( "time" "github.com/stashapp/stash/internal/manager" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/txn" ) @@ -216,8 +215,8 @@ func (m Middleware) fetchMovies(ctx context.Context) func(keys []int) ([]*models } } -func (m Middleware) fetchFiles(ctx context.Context) func(keys []file.ID) ([]file.File, []error) { - return func(keys []file.ID) (ret []file.File, errs []error) { +func (m Middleware) fetchFiles(ctx context.Context) func(keys []models.FileID) ([]models.File, []error) { + return func(keys []models.FileID) (ret []models.File, errs []error) { err := m.withTxn(ctx, func(ctx context.Context) error { var 
err error ret, err = m.Repository.File.Find(ctx, keys...) @@ -227,8 +226,8 @@ func (m Middleware) fetchFiles(ctx context.Context) func(keys []file.ID) ([]file } } -func (m Middleware) fetchScenesFileIDs(ctx context.Context) func(keys []int) ([][]file.ID, []error) { - return func(keys []int) (ret [][]file.ID, errs []error) { +func (m Middleware) fetchScenesFileIDs(ctx context.Context) func(keys []int) ([][]models.FileID, []error) { + return func(keys []int) (ret [][]models.FileID, errs []error) { err := m.withTxn(ctx, func(ctx context.Context) error { var err error ret, err = m.Repository.Scene.GetManyFileIDs(ctx, keys) @@ -238,8 +237,8 @@ func (m Middleware) fetchScenesFileIDs(ctx context.Context) func(keys []int) ([] } } -func (m Middleware) fetchImagesFileIDs(ctx context.Context) func(keys []int) ([][]file.ID, []error) { - return func(keys []int) (ret [][]file.ID, errs []error) { +func (m Middleware) fetchImagesFileIDs(ctx context.Context) func(keys []int) ([][]models.FileID, []error) { + return func(keys []int) (ret [][]models.FileID, errs []error) { err := m.withTxn(ctx, func(ctx context.Context) error { var err error ret, err = m.Repository.Image.GetManyFileIDs(ctx, keys) @@ -249,8 +248,8 @@ func (m Middleware) fetchImagesFileIDs(ctx context.Context) func(keys []int) ([] } } -func (m Middleware) fetchGalleriesFileIDs(ctx context.Context) func(keys []int) ([][]file.ID, []error) { - return func(keys []int) (ret [][]file.ID, errs []error) { +func (m Middleware) fetchGalleriesFileIDs(ctx context.Context) func(keys []int) ([][]models.FileID, []error) { + return func(keys []int) (ret [][]models.FileID, errs []error) { err := m.withTxn(ctx, func(ctx context.Context) error { var err error ret, err = m.Repository.Gallery.GetManyFileIDs(ctx, keys) diff --git a/internal/api/loaders/fileloader_gen.go b/internal/api/loaders/fileloader_gen.go index 348dcbb7f09..6289e7a50cf 100644 --- a/internal/api/loaders/fileloader_gen.go +++ b/internal/api/loaders/fileloader_gen.go @@ 
-6,13 +6,13 @@ import ( "sync" "time" - "github.com/stashapp/stash/pkg/file" + "github.com/stashapp/stash/pkg/models" ) // FileLoaderConfig captures the config to create a new FileLoader type FileLoaderConfig struct { // Fetch is a method that provides the data for the loader - Fetch func(keys []file.ID) ([]file.File, []error) + Fetch func(keys []models.FileID) ([]models.File, []error) // Wait is how long wait before sending a batch Wait time.Duration @@ -33,7 +33,7 @@ func NewFileLoader(config FileLoaderConfig) *FileLoader { // FileLoader batches and caches requests type FileLoader struct { // this method provides the data for the loader - fetch func(keys []file.ID) ([]file.File, []error) + fetch func(keys []models.FileID) ([]models.File, []error) // how long to done before sending a batch wait time.Duration @@ -44,7 +44,7 @@ type FileLoader struct { // INTERNAL // lazily created cache - cache map[file.ID]file.File + cache map[models.FileID]models.File // the current batch. keys will continue to be collected until timeout is hit, // then everything will be sent to the fetch method and out to the listeners @@ -55,26 +55,26 @@ type FileLoader struct { } type fileLoaderBatch struct { - keys []file.ID - data []file.File + keys []models.FileID + data []models.File error []error closing bool done chan struct{} } // Load a File by key, batching and caching will be applied automatically -func (l *FileLoader) Load(key file.ID) (file.File, error) { +func (l *FileLoader) Load(key models.FileID) (models.File, error) { return l.LoadThunk(key)() } // LoadThunk returns a function that when called will block waiting for a File. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. 
-func (l *FileLoader) LoadThunk(key file.ID) func() (file.File, error) { +func (l *FileLoader) LoadThunk(key models.FileID) func() (models.File, error) { l.mu.Lock() if it, ok := l.cache[key]; ok { l.mu.Unlock() - return func() (file.File, error) { + return func() (models.File, error) { return it, nil } } @@ -85,10 +85,10 @@ func (l *FileLoader) LoadThunk(key file.ID) func() (file.File, error) { pos := batch.keyIndex(l, key) l.mu.Unlock() - return func() (file.File, error) { + return func() (models.File, error) { <-batch.done - var data file.File + var data models.File if pos < len(batch.data) { data = batch.data[pos] } @@ -113,14 +113,14 @@ func (l *FileLoader) LoadThunk(key file.ID) func() (file.File, error) { // LoadAll fetches many keys at once. It will be broken into appropriate sized // sub batches depending on how the loader is configured -func (l *FileLoader) LoadAll(keys []file.ID) ([]file.File, []error) { - results := make([]func() (file.File, error), len(keys)) +func (l *FileLoader) LoadAll(keys []models.FileID) ([]models.File, []error) { + results := make([]func() (models.File, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - files := make([]file.File, len(keys)) + files := make([]models.File, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { files[i], errors[i] = thunk() @@ -131,13 +131,13 @@ func (l *FileLoader) LoadAll(keys []file.ID) ([]file.File, []error) { // LoadAllThunk returns a function that when called will block waiting for a Files. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. 
-func (l *FileLoader) LoadAllThunk(keys []file.ID) func() ([]file.File, []error) { - results := make([]func() (file.File, error), len(keys)) +func (l *FileLoader) LoadAllThunk(keys []models.FileID) func() ([]models.File, []error) { + results := make([]func() (models.File, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - return func() ([]file.File, []error) { - files := make([]file.File, len(keys)) + return func() ([]models.File, []error) { + files := make([]models.File, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { files[i], errors[i] = thunk() @@ -149,7 +149,7 @@ func (l *FileLoader) LoadAllThunk(keys []file.ID) func() ([]file.File, []error) // Prime the cache with the provided key and value. If the key already exists, no change is made // and false is returned. // (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) -func (l *FileLoader) Prime(key file.ID, value file.File) bool { +func (l *FileLoader) Prime(key models.FileID, value models.File) bool { l.mu.Lock() var found bool if _, found = l.cache[key]; !found { @@ -160,22 +160,22 @@ func (l *FileLoader) Prime(key file.ID, value file.File) bool { } // Clear the value at key from the cache, if it exists -func (l *FileLoader) Clear(key file.ID) { +func (l *FileLoader) Clear(key models.FileID) { l.mu.Lock() delete(l.cache, key) l.mu.Unlock() } -func (l *FileLoader) unsafeSet(key file.ID, value file.File) { +func (l *FileLoader) unsafeSet(key models.FileID, value models.File) { if l.cache == nil { - l.cache = map[file.ID]file.File{} + l.cache = map[models.FileID]models.File{} } l.cache[key] = value } // keyIndex will return the location of the key in the batch, if its not found // it will add the key to the batch -func (b *fileLoaderBatch) keyIndex(l *FileLoader, key file.ID) int { +func (b *fileLoaderBatch) keyIndex(l *FileLoader, key models.FileID) int { for i, existingKey := range b.keys { if key == 
existingKey { return i diff --git a/internal/api/loaders/galleryfileidsloader_gen.go b/internal/api/loaders/galleryfileidsloader_gen.go index 808cfbf0fa9..e3c53903683 100644 --- a/internal/api/loaders/galleryfileidsloader_gen.go +++ b/internal/api/loaders/galleryfileidsloader_gen.go @@ -6,13 +6,13 @@ import ( "sync" "time" - "github.com/stashapp/stash/pkg/file" + "github.com/stashapp/stash/pkg/models" ) // GalleryFileIDsLoaderConfig captures the config to create a new GalleryFileIDsLoader type GalleryFileIDsLoaderConfig struct { // Fetch is a method that provides the data for the loader - Fetch func(keys []int) ([][]file.ID, []error) + Fetch func(keys []int) ([][]models.FileID, []error) // Wait is how long wait before sending a batch Wait time.Duration @@ -33,7 +33,7 @@ func NewGalleryFileIDsLoader(config GalleryFileIDsLoaderConfig) *GalleryFileIDsL // GalleryFileIDsLoader batches and caches requests type GalleryFileIDsLoader struct { // this method provides the data for the loader - fetch func(keys []int) ([][]file.ID, []error) + fetch func(keys []int) ([][]models.FileID, []error) // how long to done before sending a batch wait time.Duration @@ -44,7 +44,7 @@ type GalleryFileIDsLoader struct { // INTERNAL // lazily created cache - cache map[int][]file.ID + cache map[int][]models.FileID // the current batch. 
keys will continue to be collected until timeout is hit, // then everything will be sent to the fetch method and out to the listeners @@ -56,25 +56,25 @@ type GalleryFileIDsLoader struct { type galleryFileIDsLoaderBatch struct { keys []int - data [][]file.ID + data [][]models.FileID error []error closing bool done chan struct{} } -// Load a ID by key, batching and caching will be applied automatically -func (l *GalleryFileIDsLoader) Load(key int) ([]file.ID, error) { +// Load a FileID by key, batching and caching will be applied automatically +func (l *GalleryFileIDsLoader) Load(key int) ([]models.FileID, error) { return l.LoadThunk(key)() } -// LoadThunk returns a function that when called will block waiting for a ID. +// LoadThunk returns a function that when called will block waiting for a FileID. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *GalleryFileIDsLoader) LoadThunk(key int) func() ([]file.ID, error) { +func (l *GalleryFileIDsLoader) LoadThunk(key int) func() ([]models.FileID, error) { l.mu.Lock() if it, ok := l.cache[key]; ok { l.mu.Unlock() - return func() ([]file.ID, error) { + return func() ([]models.FileID, error) { return it, nil } } @@ -85,10 +85,10 @@ func (l *GalleryFileIDsLoader) LoadThunk(key int) func() ([]file.ID, error) { pos := batch.keyIndex(l, key) l.mu.Unlock() - return func() ([]file.ID, error) { + return func() ([]models.FileID, error) { <-batch.done - var data []file.ID + var data []models.FileID if pos < len(batch.data) { data = batch.data[pos] } @@ -113,49 +113,49 @@ func (l *GalleryFileIDsLoader) LoadThunk(key int) func() ([]file.ID, error) { // LoadAll fetches many keys at once. 
It will be broken into appropriate sized // sub batches depending on how the loader is configured -func (l *GalleryFileIDsLoader) LoadAll(keys []int) ([][]file.ID, []error) { - results := make([]func() ([]file.ID, error), len(keys)) +func (l *GalleryFileIDsLoader) LoadAll(keys []int) ([][]models.FileID, []error) { + results := make([]func() ([]models.FileID, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - iDs := make([][]file.ID, len(keys)) + fileIDs := make([][]models.FileID, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { - iDs[i], errors[i] = thunk() + fileIDs[i], errors[i] = thunk() } - return iDs, errors + return fileIDs, errors } -// LoadAllThunk returns a function that when called will block waiting for a IDs. +// LoadAllThunk returns a function that when called will block waiting for a FileIDs. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *GalleryFileIDsLoader) LoadAllThunk(keys []int) func() ([][]file.ID, []error) { - results := make([]func() ([]file.ID, error), len(keys)) +func (l *GalleryFileIDsLoader) LoadAllThunk(keys []int) func() ([][]models.FileID, []error) { + results := make([]func() ([]models.FileID, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - return func() ([][]file.ID, []error) { - iDs := make([][]file.ID, len(keys)) + return func() ([][]models.FileID, []error) { + fileIDs := make([][]models.FileID, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { - iDs[i], errors[i] = thunk() + fileIDs[i], errors[i] = thunk() } - return iDs, errors + return fileIDs, errors } } // Prime the cache with the provided key and value. If the key already exists, no change is made // and false is returned. // (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) 
-func (l *GalleryFileIDsLoader) Prime(key int, value []file.ID) bool { +func (l *GalleryFileIDsLoader) Prime(key int, value []models.FileID) bool { l.mu.Lock() var found bool if _, found = l.cache[key]; !found { // make a copy when writing to the cache, its easy to pass a pointer in from a loop var // and end up with the whole cache pointing to the same value. - cpy := make([]file.ID, len(value)) + cpy := make([]models.FileID, len(value)) copy(cpy, value) l.unsafeSet(key, cpy) } @@ -170,9 +170,9 @@ func (l *GalleryFileIDsLoader) Clear(key int) { l.mu.Unlock() } -func (l *GalleryFileIDsLoader) unsafeSet(key int, value []file.ID) { +func (l *GalleryFileIDsLoader) unsafeSet(key int, value []models.FileID) { if l.cache == nil { - l.cache = map[int][]file.ID{} + l.cache = map[int][]models.FileID{} } l.cache[key] = value } diff --git a/internal/api/loaders/imagefileidsloader_gen.go b/internal/api/loaders/imagefileidsloader_gen.go index 7e633d8ef01..e19d458ad81 100644 --- a/internal/api/loaders/imagefileidsloader_gen.go +++ b/internal/api/loaders/imagefileidsloader_gen.go @@ -6,13 +6,13 @@ import ( "sync" "time" - "github.com/stashapp/stash/pkg/file" + "github.com/stashapp/stash/pkg/models" ) // ImageFileIDsLoaderConfig captures the config to create a new ImageFileIDsLoader type ImageFileIDsLoaderConfig struct { // Fetch is a method that provides the data for the loader - Fetch func(keys []int) ([][]file.ID, []error) + Fetch func(keys []int) ([][]models.FileID, []error) // Wait is how long wait before sending a batch Wait time.Duration @@ -33,7 +33,7 @@ func NewImageFileIDsLoader(config ImageFileIDsLoaderConfig) *ImageFileIDsLoader // ImageFileIDsLoader batches and caches requests type ImageFileIDsLoader struct { // this method provides the data for the loader - fetch func(keys []int) ([][]file.ID, []error) + fetch func(keys []int) ([][]models.FileID, []error) // how long to done before sending a batch wait time.Duration @@ -44,7 +44,7 @@ type ImageFileIDsLoader struct { 
// INTERNAL // lazily created cache - cache map[int][]file.ID + cache map[int][]models.FileID // the current batch. keys will continue to be collected until timeout is hit, // then everything will be sent to the fetch method and out to the listeners @@ -56,25 +56,25 @@ type ImageFileIDsLoader struct { type imageFileIDsLoaderBatch struct { keys []int - data [][]file.ID + data [][]models.FileID error []error closing bool done chan struct{} } -// Load a ID by key, batching and caching will be applied automatically -func (l *ImageFileIDsLoader) Load(key int) ([]file.ID, error) { +// Load a FileID by key, batching and caching will be applied automatically +func (l *ImageFileIDsLoader) Load(key int) ([]models.FileID, error) { return l.LoadThunk(key)() } -// LoadThunk returns a function that when called will block waiting for a ID. +// LoadThunk returns a function that when called will block waiting for a FileID. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *ImageFileIDsLoader) LoadThunk(key int) func() ([]file.ID, error) { +func (l *ImageFileIDsLoader) LoadThunk(key int) func() ([]models.FileID, error) { l.mu.Lock() if it, ok := l.cache[key]; ok { l.mu.Unlock() - return func() ([]file.ID, error) { + return func() ([]models.FileID, error) { return it, nil } } @@ -85,10 +85,10 @@ func (l *ImageFileIDsLoader) LoadThunk(key int) func() ([]file.ID, error) { pos := batch.keyIndex(l, key) l.mu.Unlock() - return func() ([]file.ID, error) { + return func() ([]models.FileID, error) { <-batch.done - var data []file.ID + var data []models.FileID if pos < len(batch.data) { data = batch.data[pos] } @@ -113,49 +113,49 @@ func (l *ImageFileIDsLoader) LoadThunk(key int) func() ([]file.ID, error) { // LoadAll fetches many keys at once. 
It will be broken into appropriate sized // sub batches depending on how the loader is configured -func (l *ImageFileIDsLoader) LoadAll(keys []int) ([][]file.ID, []error) { - results := make([]func() ([]file.ID, error), len(keys)) +func (l *ImageFileIDsLoader) LoadAll(keys []int) ([][]models.FileID, []error) { + results := make([]func() ([]models.FileID, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - iDs := make([][]file.ID, len(keys)) + fileIDs := make([][]models.FileID, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { - iDs[i], errors[i] = thunk() + fileIDs[i], errors[i] = thunk() } - return iDs, errors + return fileIDs, errors } -// LoadAllThunk returns a function that when called will block waiting for a IDs. +// LoadAllThunk returns a function that when called will block waiting for a FileIDs. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *ImageFileIDsLoader) LoadAllThunk(keys []int) func() ([][]file.ID, []error) { - results := make([]func() ([]file.ID, error), len(keys)) +func (l *ImageFileIDsLoader) LoadAllThunk(keys []int) func() ([][]models.FileID, []error) { + results := make([]func() ([]models.FileID, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - return func() ([][]file.ID, []error) { - iDs := make([][]file.ID, len(keys)) + return func() ([][]models.FileID, []error) { + fileIDs := make([][]models.FileID, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { - iDs[i], errors[i] = thunk() + fileIDs[i], errors[i] = thunk() } - return iDs, errors + return fileIDs, errors } } // Prime the cache with the provided key and value. If the key already exists, no change is made // and false is returned. // (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) 
-func (l *ImageFileIDsLoader) Prime(key int, value []file.ID) bool { +func (l *ImageFileIDsLoader) Prime(key int, value []models.FileID) bool { l.mu.Lock() var found bool if _, found = l.cache[key]; !found { // make a copy when writing to the cache, its easy to pass a pointer in from a loop var // and end up with the whole cache pointing to the same value. - cpy := make([]file.ID, len(value)) + cpy := make([]models.FileID, len(value)) copy(cpy, value) l.unsafeSet(key, cpy) } @@ -170,9 +170,9 @@ func (l *ImageFileIDsLoader) Clear(key int) { l.mu.Unlock() } -func (l *ImageFileIDsLoader) unsafeSet(key int, value []file.ID) { +func (l *ImageFileIDsLoader) unsafeSet(key int, value []models.FileID) { if l.cache == nil { - l.cache = map[int][]file.ID{} + l.cache = map[int][]models.FileID{} } l.cache[key] = value } diff --git a/internal/api/loaders/scenefileidsloader_gen.go b/internal/api/loaders/scenefileidsloader_gen.go index 663be2c6fd3..16e1690c4cd 100644 --- a/internal/api/loaders/scenefileidsloader_gen.go +++ b/internal/api/loaders/scenefileidsloader_gen.go @@ -6,13 +6,13 @@ import ( "sync" "time" - "github.com/stashapp/stash/pkg/file" + "github.com/stashapp/stash/pkg/models" ) // SceneFileIDsLoaderConfig captures the config to create a new SceneFileIDsLoader type SceneFileIDsLoaderConfig struct { // Fetch is a method that provides the data for the loader - Fetch func(keys []int) ([][]file.ID, []error) + Fetch func(keys []int) ([][]models.FileID, []error) // Wait is how long wait before sending a batch Wait time.Duration @@ -33,7 +33,7 @@ func NewSceneFileIDsLoader(config SceneFileIDsLoaderConfig) *SceneFileIDsLoader // SceneFileIDsLoader batches and caches requests type SceneFileIDsLoader struct { // this method provides the data for the loader - fetch func(keys []int) ([][]file.ID, []error) + fetch func(keys []int) ([][]models.FileID, []error) // how long to done before sending a batch wait time.Duration @@ -44,7 +44,7 @@ type SceneFileIDsLoader struct { // 
INTERNAL // lazily created cache - cache map[int][]file.ID + cache map[int][]models.FileID // the current batch. keys will continue to be collected until timeout is hit, // then everything will be sent to the fetch method and out to the listeners @@ -56,25 +56,25 @@ type SceneFileIDsLoader struct { type sceneFileIDsLoaderBatch struct { keys []int - data [][]file.ID + data [][]models.FileID error []error closing bool done chan struct{} } -// Load a ID by key, batching and caching will be applied automatically -func (l *SceneFileIDsLoader) Load(key int) ([]file.ID, error) { +// Load a FileID by key, batching and caching will be applied automatically +func (l *SceneFileIDsLoader) Load(key int) ([]models.FileID, error) { return l.LoadThunk(key)() } -// LoadThunk returns a function that when called will block waiting for a ID. +// LoadThunk returns a function that when called will block waiting for a FileID. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *SceneFileIDsLoader) LoadThunk(key int) func() ([]file.ID, error) { +func (l *SceneFileIDsLoader) LoadThunk(key int) func() ([]models.FileID, error) { l.mu.Lock() if it, ok := l.cache[key]; ok { l.mu.Unlock() - return func() ([]file.ID, error) { + return func() ([]models.FileID, error) { return it, nil } } @@ -85,10 +85,10 @@ func (l *SceneFileIDsLoader) LoadThunk(key int) func() ([]file.ID, error) { pos := batch.keyIndex(l, key) l.mu.Unlock() - return func() ([]file.ID, error) { + return func() ([]models.FileID, error) { <-batch.done - var data []file.ID + var data []models.FileID if pos < len(batch.data) { data = batch.data[pos] } @@ -113,49 +113,49 @@ func (l *SceneFileIDsLoader) LoadThunk(key int) func() ([]file.ID, error) { // LoadAll fetches many keys at once. 
It will be broken into appropriate sized // sub batches depending on how the loader is configured -func (l *SceneFileIDsLoader) LoadAll(keys []int) ([][]file.ID, []error) { - results := make([]func() ([]file.ID, error), len(keys)) +func (l *SceneFileIDsLoader) LoadAll(keys []int) ([][]models.FileID, []error) { + results := make([]func() ([]models.FileID, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - iDs := make([][]file.ID, len(keys)) + fileIDs := make([][]models.FileID, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { - iDs[i], errors[i] = thunk() + fileIDs[i], errors[i] = thunk() } - return iDs, errors + return fileIDs, errors } -// LoadAllThunk returns a function that when called will block waiting for a IDs. +// LoadAllThunk returns a function that when called will block waiting for a FileIDs. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *SceneFileIDsLoader) LoadAllThunk(keys []int) func() ([][]file.ID, []error) { - results := make([]func() ([]file.ID, error), len(keys)) +func (l *SceneFileIDsLoader) LoadAllThunk(keys []int) func() ([][]models.FileID, []error) { + results := make([]func() ([]models.FileID, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - return func() ([][]file.ID, []error) { - iDs := make([][]file.ID, len(keys)) + return func() ([][]models.FileID, []error) { + fileIDs := make([][]models.FileID, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { - iDs[i], errors[i] = thunk() + fileIDs[i], errors[i] = thunk() } - return iDs, errors + return fileIDs, errors } } // Prime the cache with the provided key and value. If the key already exists, no change is made // and false is returned. // (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) 
-func (l *SceneFileIDsLoader) Prime(key int, value []file.ID) bool { +func (l *SceneFileIDsLoader) Prime(key int, value []models.FileID) bool { l.mu.Lock() var found bool if _, found = l.cache[key]; !found { // make a copy when writing to the cache, its easy to pass a pointer in from a loop var // and end up with the whole cache pointing to the same value. - cpy := make([]file.ID, len(value)) + cpy := make([]models.FileID, len(value)) copy(cpy, value) l.unsafeSet(key, cpy) } @@ -170,9 +170,9 @@ func (l *SceneFileIDsLoader) Clear(key int) { l.mu.Unlock() } -func (l *SceneFileIDsLoader) unsafeSet(key int, value []file.ID) { +func (l *SceneFileIDsLoader) unsafeSet(key int, value []models.FileID) { if l.cache == nil { - l.cache = map[int][]file.ID{} + l.cache = map[int][]models.FileID{} } l.cache[key] = value } diff --git a/internal/api/models.go b/internal/api/models.go index 92713a56e8c..03c20ee4396 100644 --- a/internal/api/models.go +++ b/internal/api/models.go @@ -9,9 +9,16 @@ import ( "github.com/99designs/gqlgen/graphql" "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/utils" ) +type BaseFile interface{} + +type GalleryFile struct { + *models.BaseFile +} + var ErrTimestamp = errors.New("cannot parse Timestamp") func MarshalTimestamp(t time.Time) graphql.Marshaler { diff --git a/internal/api/resolver.go b/internal/api/resolver.go index ff74a4456f7..ea0bd256c22 100644 --- a/internal/api/resolver.go +++ b/internal/api/resolver.go @@ -82,6 +82,9 @@ func (r *Resolver) Subscription() SubscriptionResolver { func (r *Resolver) Tag() TagResolver { return &tagResolver{r} } +func (r *Resolver) SavedFilter() SavedFilterResolver { + return &savedFilterResolver{r} +} type mutationResolver struct{ *Resolver } type queryResolver struct{ *Resolver } @@ -96,6 +99,7 @@ type imageResolver struct{ *Resolver } type studioResolver struct{ *Resolver } type movieResolver struct{ *Resolver } type tagResolver struct{ *Resolver 
} +type savedFilterResolver struct{ *Resolver } func (r *Resolver) withTxn(ctx context.Context, fn func(ctx context.Context) error) error { return txn.WithTxn(ctx, r.txnManager, fn) diff --git a/internal/api/resolver_model_gallery.go b/internal/api/resolver_model_gallery.go index 8157404dcf7..e7c0cd6a04c 100644 --- a/internal/api/resolver_model_gallery.go +++ b/internal/api/resolver_model_gallery.go @@ -2,18 +2,16 @@ package api import ( "context" - "strconv" "time" "github.com/stashapp/stash/internal/api/loaders" "github.com/stashapp/stash/internal/manager/config" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/models" ) -func (r *galleryResolver) getPrimaryFile(ctx context.Context, obj *models.Gallery) (file.File, error) { +func (r *galleryResolver) getPrimaryFile(ctx context.Context, obj *models.Gallery) (models.File, error) { if obj.PrimaryFileID != nil { f, err := loaders.From(ctx).FileByID.Load(*obj.PrimaryFileID) if err != nil { @@ -26,7 +24,7 @@ func (r *galleryResolver) getPrimaryFile(ctx context.Context, obj *models.Galler return nil, nil } -func (r *galleryResolver) getFiles(ctx context.Context, obj *models.Gallery) ([]file.File, error) { +func (r *galleryResolver) getFiles(ctx context.Context, obj *models.Gallery) ([]models.File, error) { fileIDs, err := loaders.From(ctx).GalleryFiles.Load(obj.ID) if err != nil { return nil, err @@ -45,34 +43,20 @@ func (r *galleryResolver) Files(ctx context.Context, obj *models.Gallery) ([]*Ga ret := make([]*GalleryFile, len(files)) for i, f := range files { - base := f.Base() ret[i] = &GalleryFile{ - ID: strconv.Itoa(int(base.ID)), - Path: base.Path, - Basename: base.Basename, - ParentFolderID: strconv.Itoa(int(base.ParentFolderID)), - ModTime: base.ModTime, - Size: base.Size, - CreatedAt: base.CreatedAt, - UpdatedAt: base.UpdatedAt, - Fingerprints: resolveFingerprints(base), - } - - if base.ZipFileID != nil { - zipFileID := strconv.Itoa(int(*base.ZipFileID)) 
- ret[i].ZipFileID = &zipFileID + BaseFile: f.Base(), } } return ret, nil } -func (r *galleryResolver) Folder(ctx context.Context, obj *models.Gallery) (*Folder, error) { +func (r *galleryResolver) Folder(ctx context.Context, obj *models.Gallery) (*models.Folder, error) { if obj.FolderID == nil { return nil, nil } - var ret *file.Folder + var ret *models.Folder if err := r.withReadTxn(ctx, func(ctx context.Context) error { var err error @@ -91,25 +75,7 @@ func (r *galleryResolver) Folder(ctx context.Context, obj *models.Gallery) (*Fol return nil, nil } - rr := &Folder{ - ID: ret.ID.String(), - Path: ret.Path, - ModTime: ret.ModTime, - CreatedAt: ret.CreatedAt, - UpdatedAt: ret.UpdatedAt, - } - - if ret.ParentFolderID != nil { - pfidStr := ret.ParentFolderID.String() - rr.ParentFolderID = &pfidStr - } - - if ret.ZipFileID != nil { - zfidStr := ret.ZipFileID.String() - rr.ZipFileID = &zfidStr - } - - return rr, nil + return ret, nil } func (r *galleryResolver) FileModTime(ctx context.Context, obj *models.Gallery) (*time.Time, error) { diff --git a/internal/api/resolver_model_image.go b/internal/api/resolver_model_image.go index 9bfadafc7a4..f4e699b7ba6 100644 --- a/internal/api/resolver_model_image.go +++ b/internal/api/resolver_model_image.go @@ -3,57 +3,35 @@ package api import ( "context" "fmt" - "strconv" "time" "github.com/stashapp/stash/internal/api/loaders" "github.com/stashapp/stash/internal/api/urlbuilders" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" ) -func convertImageFile(f *file.ImageFile) *ImageFile { - ret := &ImageFile{ - ID: strconv.Itoa(int(f.ID)), - Path: f.Path, - Basename: f.Basename, - ParentFolderID: strconv.Itoa(int(f.ParentFolderID)), - ModTime: f.ModTime, - Size: f.Size, - Width: f.Width, - Height: f.Height, - CreatedAt: f.CreatedAt, - UpdatedAt: f.UpdatedAt, - Fingerprints: resolveFingerprints(f.Base()), +func convertVisualFile(f models.File) (models.VisualFile, error) { + vf, ok := f.(models.VisualFile) + 
if !ok { + return nil, fmt.Errorf("file %s is not a visual file", f.Base().Path) } - - if f.ZipFileID != nil { - zipFileID := strconv.Itoa(int(*f.ZipFileID)) - ret.ZipFileID = &zipFileID - } - - return ret + return vf, nil } -func (r *imageResolver) getPrimaryFile(ctx context.Context, obj *models.Image) (file.VisualFile, error) { +func (r *imageResolver) getPrimaryFile(ctx context.Context, obj *models.Image) (models.VisualFile, error) { if obj.PrimaryFileID != nil { f, err := loaders.From(ctx).FileByID.Load(*obj.PrimaryFileID) if err != nil { return nil, err } - asFrame, ok := f.(file.VisualFile) - if !ok { - return nil, fmt.Errorf("file %T is not an frame", f) - } - - return asFrame, nil + return convertVisualFile(f) } return nil, nil } -func (r *imageResolver) getFiles(ctx context.Context, obj *models.Image) ([]file.File, error) { +func (r *imageResolver) getFiles(ctx context.Context, obj *models.Image) ([]models.File, error) { fileIDs, err := loaders.From(ctx).ImageFiles.Load(obj.ID) if err != nil { return nil, err @@ -88,30 +66,21 @@ func (r *imageResolver) File(ctx context.Context, obj *models.Image) (*ImageFile }, nil } -func convertVisualFile(f file.File) VisualFile { - switch f := f.(type) { - case *file.ImageFile: - return convertImageFile(f) - case *file.VideoFile: - return convertVideoFile(f) - default: - panic(fmt.Sprintf("unknown file type %T", f)) - } -} - -func (r *imageResolver) VisualFiles(ctx context.Context, obj *models.Image) ([]VisualFile, error) { - fileIDs, err := loaders.From(ctx).ImageFiles.Load(obj.ID) +func (r *imageResolver) VisualFiles(ctx context.Context, obj *models.Image) ([]models.VisualFile, error) { + files, err := r.getFiles(ctx, obj) if err != nil { return nil, err } - files, errs := loaders.From(ctx).FileByID.LoadAll(fileIDs) - ret := make([]VisualFile, len(files)) + ret := make([]models.VisualFile, len(files)) for i, f := range files { - ret[i] = convertVisualFile(f) + ret[i], err = convertVisualFile(f) + if err != nil { + 
return nil, err + } } - return ret, firstError(errs) + return ret, nil } func (r *imageResolver) Date(ctx context.Context, obj *models.Image) (*string, error) { @@ -122,24 +91,22 @@ func (r *imageResolver) Date(ctx context.Context, obj *models.Image) (*string, e return nil, nil } -func (r *imageResolver) Files(ctx context.Context, obj *models.Image) ([]*ImageFile, error) { +func (r *imageResolver) Files(ctx context.Context, obj *models.Image) ([]*models.ImageFile, error) { files, err := r.getFiles(ctx, obj) if err != nil { return nil, err } - var ret []*ImageFile + var ret []*models.ImageFile for _, f := range files { // filter out non-image files - imageFile, ok := f.(*file.ImageFile) + imageFile, ok := f.(*models.ImageFile) if !ok { continue } - thisFile := convertImageFile(imageFile) - - ret = append(ret, thisFile) + ret = append(ret, imageFile) } return ret, nil @@ -231,3 +198,32 @@ func (r *imageResolver) Performers(ctx context.Context, obj *models.Image) (ret ret, errs = loaders.From(ctx).PerformerByID.LoadAll(obj.PerformerIDs.List()) return ret, firstError(errs) } + +func (r *imageResolver) URL(ctx context.Context, obj *models.Image) (*string, error) { + if !obj.URLs.Loaded() { + if err := r.withReadTxn(ctx, func(ctx context.Context) error { + return obj.LoadURLs(ctx, r.repository.Image) + }); err != nil { + return nil, err + } + } + + urls := obj.URLs.List() + if len(urls) == 0 { + return nil, nil + } + + return &urls[0], nil +} + +func (r *imageResolver) Urls(ctx context.Context, obj *models.Image) ([]string, error) { + if !obj.URLs.Loaded() { + if err := r.withReadTxn(ctx, func(ctx context.Context) error { + return obj.LoadURLs(ctx, r.repository.Image) + }); err != nil { + return nil, err + } + } + + return obj.URLs.List(), nil +} diff --git a/internal/api/resolver_model_saved_filter.go b/internal/api/resolver_model_saved_filter.go new file mode 100644 index 00000000000..5e1131ab347 --- /dev/null +++ b/internal/api/resolver_model_saved_filter.go @@ -0,0 
+1,11 @@ +package api + +import ( + "context" + + "github.com/stashapp/stash/pkg/models" +) + +func (r *savedFilterResolver) Filter(ctx context.Context, obj *models.SavedFilter) (string, error) { + return "", nil +} diff --git a/internal/api/resolver_model_scene.go b/internal/api/resolver_model_scene.go index 9d5b41725ce..2593555472f 100644 --- a/internal/api/resolver_model_scene.go +++ b/internal/api/resolver_model_scene.go @@ -9,50 +9,28 @@ import ( "github.com/stashapp/stash/internal/api/loaders" "github.com/stashapp/stash/internal/api/urlbuilders" "github.com/stashapp/stash/internal/manager" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/utils" ) -func convertVideoFile(f *file.VideoFile) *VideoFile { - ret := &VideoFile{ - ID: strconv.Itoa(int(f.ID)), - Path: f.Path, - Basename: f.Basename, - ParentFolderID: strconv.Itoa(int(f.ParentFolderID)), - ModTime: f.ModTime, - Format: f.Format, - Size: f.Size, - Duration: handleFloat64Value(f.Duration), - VideoCodec: f.VideoCodec, - AudioCodec: f.AudioCodec, - Width: f.Width, - Height: f.Height, - FrameRate: handleFloat64Value(f.FrameRate), - BitRate: int(f.BitRate), - CreatedAt: f.CreatedAt, - UpdatedAt: f.UpdatedAt, - Fingerprints: resolveFingerprints(f.Base()), - } - - if f.ZipFileID != nil { - zipFileID := strconv.Itoa(int(*f.ZipFileID)) - ret.ZipFileID = &zipFileID - } - - return ret +func convertVideoFile(f models.File) (*models.VideoFile, error) { + vf, ok := f.(*models.VideoFile) + if !ok { + return nil, fmt.Errorf("file %T is not a video file", f) + } + return vf, nil } -func (r *sceneResolver) getPrimaryFile(ctx context.Context, obj *models.Scene) (*file.VideoFile, error) { +func (r *sceneResolver) getPrimaryFile(ctx context.Context, obj *models.Scene) (*models.VideoFile, error) { if obj.PrimaryFileID != nil { f, err := loaders.From(ctx).FileByID.Load(*obj.PrimaryFileID) if err != nil { return nil, err } - ret, ok := f.(*file.VideoFile) - if !ok { - 
return nil, fmt.Errorf("file %T is not an image file", f) + ret, err := convertVideoFile(f) + if err != nil { + return nil, err } obj.Files.SetPrimary(ret) @@ -65,26 +43,29 @@ func (r *sceneResolver) getPrimaryFile(ctx context.Context, obj *models.Scene) ( return nil, nil } -func (r *sceneResolver) getFiles(ctx context.Context, obj *models.Scene) ([]*file.VideoFile, error) { +func (r *sceneResolver) getFiles(ctx context.Context, obj *models.Scene) ([]*models.VideoFile, error) { fileIDs, err := loaders.From(ctx).SceneFiles.Load(obj.ID) if err != nil { return nil, err } files, errs := loaders.From(ctx).FileByID.LoadAll(fileIDs) - ret := make([]*file.VideoFile, len(files)) - for i, bf := range files { - f, ok := bf.(*file.VideoFile) - if !ok { - return nil, fmt.Errorf("file %T is not a video file", f) - } + err = firstError(errs) + if err != nil { + return nil, err + } - ret[i] = f + ret := make([]*models.VideoFile, len(files)) + for i, f := range files { + ret[i], err = convertVideoFile(f) + if err != nil { + return nil, err + } } obj.Files.Set(ret) - return ret, firstError(errs) + return ret, nil } func (r *sceneResolver) FileModTime(ctx context.Context, obj *models.Scene) (*time.Time, error) { @@ -132,19 +113,13 @@ func (r *sceneResolver) File(ctx context.Context, obj *models.Scene) (*models.Sc }, nil } -func (r *sceneResolver) Files(ctx context.Context, obj *models.Scene) ([]*VideoFile, error) { +func (r *sceneResolver) Files(ctx context.Context, obj *models.Scene) ([]*models.VideoFile, error) { files, err := r.getFiles(ctx, obj) if err != nil { return nil, err } - ret := make([]*VideoFile, len(files)) - - for i, f := range files { - ret[i] = convertVideoFile(f) - } - - return ret, nil + return files, nil } func (r *sceneResolver) Rating(ctx context.Context, obj *models.Scene) (*int, error) { @@ -159,28 +134,6 @@ func (r *sceneResolver) Rating100(ctx context.Context, obj *models.Scene) (*int, return obj.Rating, nil } -func resolveFingerprints(f *file.BaseFile) 
[]*Fingerprint { - ret := make([]*Fingerprint, len(f.Fingerprints)) - - for i, fp := range f.Fingerprints { - ret[i] = &Fingerprint{ - Type: fp.Type, - Value: formatFingerprint(fp.Fingerprint), - } - } - - return ret -} - -func formatFingerprint(fp interface{}) string { - switch v := fp.(type) { - case int64: - return strconv.FormatUint(uint64(v), 16) - default: - return fmt.Sprintf("%v", fp) - } -} - func (r *sceneResolver) Paths(ctx context.Context, obj *models.Scene) (*ScenePathsType, error) { baseURL, _ := ctx.Value(BaseURLCtxKey).(string) config := manager.GetInstance().Config @@ -322,16 +275,6 @@ func (r *sceneResolver) Performers(ctx context.Context, obj *models.Scene) (ret return ret, firstError(errs) } -func stashIDsSliceToPtrSlice(v []models.StashID) []*models.StashID { - ret := make([]*models.StashID, len(v)) - for i, vv := range v { - c := vv - ret[i] = &c - } - - return ret -} - func (r *sceneResolver) StashIds(ctx context.Context, obj *models.Scene) (ret []*models.StashID, err error) { if err := r.withReadTxn(ctx, func(ctx context.Context) error { return obj.LoadStashIDs(ctx, r.repository.Scene) @@ -352,7 +295,7 @@ func (r *sceneResolver) Phash(ctx context.Context, obj *models.Scene) (*string, return nil, nil } - val := f.Fingerprints.Get(file.FingerprintTypePhash) + val := f.Fingerprints.Get(models.FingerprintTypePhash) if val == nil { return nil, nil } diff --git a/internal/api/resolver_model_tag.go b/internal/api/resolver_model_tag.go index 778dc7fa623..9124b18f483 100644 --- a/internal/api/resolver_model_tag.go +++ b/internal/api/resolver_model_tag.go @@ -113,3 +113,25 @@ func (r *tagResolver) ImagePath(ctx context.Context, obj *models.Tag) (*string, imagePath := urlbuilders.NewTagURLBuilder(baseURL, obj).GetTagImageURL(hasImage) return &imagePath, nil } + +func (r *tagResolver) ParentCount(ctx context.Context, obj *models.Tag) (ret int, err error) { + if err := r.withReadTxn(ctx, func(ctx context.Context) error { + ret, err = 
r.repository.Tag.CountByParentTagID(ctx, obj.ID) + return err + }); err != nil { + return ret, err + } + + return ret, nil +} + +func (r *tagResolver) ChildCount(ctx context.Context, obj *models.Tag) (ret int, err error) { + if err := r.withReadTxn(ctx, func(ctx context.Context) error { + ret, err = r.repository.Tag.CountByChildTagID(ctx, obj.ID) + return err + }); err != nil { + return ret, err + } + + return ret, nil +} diff --git a/internal/api/resolver_mutation_file.go b/internal/api/resolver_mutation_file.go index 0b8b84ea0a2..e8fecef80a8 100644 --- a/internal/api/resolver_mutation_file.go +++ b/internal/api/resolver_mutation_file.go @@ -8,6 +8,7 @@ import ( "github.com/stashapp/stash/internal/manager" "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/fsutil" + "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/sliceutil/stringslice" ) @@ -19,13 +20,13 @@ func (r *mutationResolver) MoveFiles(ctx context.Context, input MoveFilesInput) mover.RegisterHooks(ctx, r.txnManager) var ( - folder *file.Folder + folder *models.Folder basename string ) fileIDs, err := stringslice.StringSliceToIntSlice(input.Ids) if err != nil { - return fmt.Errorf("converting file ids: %w", err) + return fmt.Errorf("converting ids: %w", err) } switch { @@ -34,10 +35,10 @@ func (r *mutationResolver) MoveFiles(ctx context.Context, input MoveFilesInput) folderID, err := strconv.Atoi(*input.DestinationFolderID) if err != nil { - return fmt.Errorf("invalid folder id %s: %w", *input.DestinationFolderID, err) + return fmt.Errorf("converting destination folder id: %w", err) } - folder, err = folderStore.Find(ctx, file.FolderID(folderID)) + folder, err = folderStore.Find(ctx, models.FolderID(folderID)) if err != nil { return fmt.Errorf("finding destination folder: %w", err) } @@ -82,7 +83,7 @@ func (r *mutationResolver) MoveFiles(ctx context.Context, input MoveFilesInput) } for _, fileIDInt := range fileIDs { - fileID := file.ID(fileIDInt) + fileID := 
models.FileID(fileIDInt) f, err := fileStore.Find(ctx, fileID) if err != nil { return fmt.Errorf("finding file %d: %w", fileID, err) @@ -145,7 +146,7 @@ func (r *mutationResolver) validateFileExtensionList(exts []string, oldBasename, func (r *mutationResolver) DeleteFiles(ctx context.Context, ids []string) (ret bool, err error) { fileIDs, err := stringslice.StringSliceToIntSlice(ids) if err != nil { - return false, err + return false, fmt.Errorf("converting ids: %w", err) } fileDeleter := file.NewDeleter() @@ -158,7 +159,7 @@ func (r *mutationResolver) DeleteFiles(ctx context.Context, ids []string) (ret b qb := r.repository.File for _, fileIDInt := range fileIDs { - fileID := file.ID(fileIDInt) + fileID := models.FileID(fileIDInt) f, err := qb.Find(ctx, fileID) if err != nil { return err diff --git a/internal/api/resolver_mutation_gallery.go b/internal/api/resolver_mutation_gallery.go index 368808d2ce6..c7dc8d70f64 100644 --- a/internal/api/resolver_mutation_gallery.go +++ b/internal/api/resolver_mutation_gallery.go @@ -6,7 +6,6 @@ import ( "fmt" "os" "strconv" - "time" "github.com/stashapp/stash/internal/manager" "github.com/stashapp/stash/pkg/file" @@ -18,6 +17,7 @@ import ( "github.com/stashapp/stash/pkg/utils" ) +// used to refetch gallery after hooks run func (r *mutationResolver) getGallery(ctx context.Context, id int) (ret *models.Gallery, err error) { if err := r.withTxn(ctx, func(ctx context.Context) error { ret, err = r.repository.Gallery.Find(ctx, id) @@ -39,40 +39,36 @@ func (r *mutationResolver) GalleryCreate(ctx context.Context, input GalleryCreat inputMap: getUpdateInputMap(ctx), } - performerIDs, err := stringslice.StringSliceToIntSlice(input.PerformerIds) + // Populate a new gallery from the input + newGallery := models.NewGallery() + + newGallery.Title = input.Title + newGallery.URL = translator.string(input.URL) + newGallery.Details = translator.string(input.Details) + newGallery.Rating = translator.ratingConversion(input.Rating, input.Rating100) 
+ + var err error + + newGallery.Date, err = translator.datePtr(input.Date) if err != nil { - return nil, fmt.Errorf("converting performer ids: %w", err) + return nil, fmt.Errorf("converting date: %w", err) } - tagIDs, err := stringslice.StringSliceToIntSlice(input.TagIds) + newGallery.StudioID, err = translator.intPtrFromString(input.StudioID) if err != nil { - return nil, fmt.Errorf("converting tag ids: %w", err) + return nil, fmt.Errorf("converting studio id: %w", err) } - sceneIDs, err := stringslice.StringSliceToIntSlice(input.SceneIds) + + newGallery.PerformerIDs, err = translator.relatedIds(input.PerformerIds) if err != nil { - return nil, fmt.Errorf("converting scene ids: %w", err) + return nil, fmt.Errorf("converting performer ids: %w", err) } - - // Populate a new gallery from the input - currentTime := time.Now() - newGallery := models.Gallery{ - Title: input.Title, - URL: translator.string(input.URL, "url"), - Details: translator.string(input.Details, "details"), - Rating: translator.ratingConversionInt(input.Rating, input.Rating100), - PerformerIDs: models.NewRelatedIDs(performerIDs), - TagIDs: models.NewRelatedIDs(tagIDs), - SceneIDs: models.NewRelatedIDs(sceneIDs), - CreatedAt: currentTime, - UpdatedAt: currentTime, - } - - newGallery.Date, err = translator.datePtr(input.Date, "date") + newGallery.TagIDs, err = translator.relatedIds(input.TagIds) if err != nil { - return nil, fmt.Errorf("converting date: %w", err) + return nil, fmt.Errorf("converting tag ids: %w", err) } - newGallery.StudioID, err = translator.intPtrFromString(input.StudioID, "studio_id") + newGallery.SceneIDs, err = translator.relatedIds(input.SceneIds) if err != nil { - return nil, fmt.Errorf("converting studio id: %w", err) + return nil, fmt.Errorf("converting scene ids: %w", err) } // Start the transaction and save the gallery @@ -140,6 +136,7 @@ func (r *mutationResolver) GalleriesUpdate(ctx context.Context, input []*models. 
} r.hookExecutor.ExecutePostHooks(ctx, gallery.ID, plugin.GalleryUpdatePost, input, translator.getFields()) + gallery, err = r.getGallery(ctx, gallery.ID) if err != nil { return nil, err @@ -154,7 +151,7 @@ func (r *mutationResolver) GalleriesUpdate(ctx context.Context, input []*models. func (r *mutationResolver) galleryUpdate(ctx context.Context, input models.GalleryUpdateInput, translator changesetTranslator) (*models.Gallery, error) { galleryID, err := strconv.Atoi(input.ID) if err != nil { - return nil, err + return nil, fmt.Errorf("converting id: %w", err) } qb := r.repository.Gallery @@ -182,62 +179,53 @@ func (r *mutationResolver) galleryUpdate(ctx context.Context, input models.Galle updatedGallery.Details = translator.optionalString(input.Details, "details") updatedGallery.URL = translator.optionalString(input.URL, "url") + updatedGallery.Rating = translator.optionalRatingConversion(input.Rating, input.Rating100) + updatedGallery.Organized = translator.optionalBool(input.Organized, "organized") + updatedGallery.Date, err = translator.optionalDate(input.Date, "date") if err != nil { return nil, fmt.Errorf("converting date: %w", err) } - updatedGallery.Rating = translator.ratingConversionOptional(input.Rating, input.Rating100) updatedGallery.StudioID, err = translator.optionalIntFromString(input.StudioID, "studio_id") if err != nil { return nil, fmt.Errorf("converting studio id: %w", err) } - updatedGallery.Organized = translator.optionalBool(input.Organized, "organized") - if input.PrimaryFileID != nil { - primaryFileID, err := strconv.Atoi(*input.PrimaryFileID) - if err != nil { - return nil, fmt.Errorf("converting primary file id: %w", err) - } - - converted := file.ID(primaryFileID) - updatedGallery.PrimaryFileID = &converted + updatedGallery.PrimaryFileID, err = translator.fileIDPtrFromString(input.PrimaryFileID) + if err != nil { + return nil, fmt.Errorf("converting primary file id: %w", err) + } + if updatedGallery.PrimaryFileID != nil { + 
primaryFileID := *updatedGallery.PrimaryFileID if err := originalGallery.LoadFiles(ctx, r.repository.Gallery); err != nil { return nil, err } // ensure that new primary file is associated with gallery - var f file.File + var f models.File for _, ff := range originalGallery.Files.List() { - if ff.Base().ID == converted { + if ff.Base().ID == primaryFileID { f = ff } } if f == nil { - return nil, fmt.Errorf("file with id %d not associated with gallery", converted) + return nil, fmt.Errorf("file with id %d not associated with gallery", primaryFileID) } } - if translator.hasField("performer_ids") { - updatedGallery.PerformerIDs, err = translateUpdateIDs(input.PerformerIds, models.RelationshipUpdateModeSet) - if err != nil { - return nil, fmt.Errorf("converting performer ids: %w", err) - } + updatedGallery.PerformerIDs, err = translator.updateIds(input.PerformerIds, "performer_ids") + if err != nil { + return nil, fmt.Errorf("converting performer ids: %w", err) } - - if translator.hasField("tag_ids") { - updatedGallery.TagIDs, err = translateUpdateIDs(input.TagIds, models.RelationshipUpdateModeSet) - if err != nil { - return nil, fmt.Errorf("converting tag ids: %w", err) - } + updatedGallery.TagIDs, err = translator.updateIds(input.TagIds, "tag_ids") + if err != nil { + return nil, fmt.Errorf("converting tag ids: %w", err) } - - if translator.hasField("scene_ids") { - updatedGallery.SceneIDs, err = translateUpdateIDs(input.SceneIds, models.RelationshipUpdateModeSet) - if err != nil { - return nil, fmt.Errorf("converting scene ids: %w", err) - } + updatedGallery.SceneIDs, err = translator.updateIds(input.SceneIds, "scene_ids") + if err != nil { + return nil, fmt.Errorf("converting scene ids: %w", err) } // gallery scene is set from the scene only @@ -253,7 +241,7 @@ func (r *mutationResolver) galleryUpdate(ctx context.Context, input models.Galle func (r *mutationResolver) BulkGalleryUpdate(ctx context.Context, input BulkGalleryUpdateInput) ([]*models.Gallery, error) { 
galleryIDs, err := stringslice.StringSliceToIntSlice(input.Ids) if err != nil { - return nil, err + return nil, fmt.Errorf("converting ids: %w", err) } translator := changesetTranslator{ @@ -265,36 +253,29 @@ func (r *mutationResolver) BulkGalleryUpdate(ctx context.Context, input BulkGall updatedGallery.Details = translator.optionalString(input.Details, "details") updatedGallery.URL = translator.optionalString(input.URL, "url") + updatedGallery.Rating = translator.optionalRatingConversion(input.Rating, input.Rating100) + updatedGallery.Organized = translator.optionalBool(input.Organized, "organized") + updatedGallery.Date, err = translator.optionalDate(input.Date, "date") if err != nil { return nil, fmt.Errorf("converting date: %w", err) } - updatedGallery.Rating = translator.ratingConversionOptional(input.Rating, input.Rating100) updatedGallery.StudioID, err = translator.optionalIntFromString(input.StudioID, "studio_id") if err != nil { return nil, fmt.Errorf("converting studio id: %w", err) } - updatedGallery.Organized = translator.optionalBool(input.Organized, "organized") - if translator.hasField("performer_ids") { - updatedGallery.PerformerIDs, err = translateUpdateIDs(input.PerformerIds.Ids, input.PerformerIds.Mode) - if err != nil { - return nil, fmt.Errorf("converting performer ids: %w", err) - } + updatedGallery.PerformerIDs, err = translator.updateIdsBulk(input.PerformerIds, "performer_ids") + if err != nil { + return nil, fmt.Errorf("converting performer ids: %w", err) } - - if translator.hasField("tag_ids") { - updatedGallery.TagIDs, err = translateUpdateIDs(input.TagIds.Ids, input.TagIds.Mode) - if err != nil { - return nil, fmt.Errorf("converting tag ids: %w", err) - } + updatedGallery.TagIDs, err = translator.updateIdsBulk(input.TagIds, "tag_ids") + if err != nil { + return nil, fmt.Errorf("converting tag ids: %w", err) } - - if translator.hasField("scene_ids") { - updatedGallery.SceneIDs, err = translateUpdateIDs(input.SceneIds.Ids, 
input.SceneIds.Mode) - if err != nil { - return nil, fmt.Errorf("converting scene ids: %w", err) - } + updatedGallery.SceneIDs, err = translator.updateIdsBulk(input.SceneIds, "scene_ids") + if err != nil { + return nil, fmt.Errorf("converting scene ids: %w", err) } ret := []*models.Gallery{} @@ -336,7 +317,7 @@ func (r *mutationResolver) BulkGalleryUpdate(ctx context.Context, input BulkGall func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.GalleryDestroyInput) (bool, error) { galleryIDs, err := stringslice.StringSliceToIntSlice(input.Ids) if err != nil { - return false, err + return false, fmt.Errorf("converting ids: %w", err) } var galleries []*models.Gallery @@ -427,12 +408,12 @@ func isStashPath(path string) bool { func (r *mutationResolver) AddGalleryImages(ctx context.Context, input GalleryAddInput) (bool, error) { galleryID, err := strconv.Atoi(input.GalleryID) if err != nil { - return false, err + return false, fmt.Errorf("converting gallery id: %w", err) } imageIDs, err := stringslice.StringSliceToIntSlice(input.ImageIds) if err != nil { - return false, err + return false, fmt.Errorf("converting image ids: %w", err) } if err := r.withTxn(ctx, func(ctx context.Context) error { @@ -457,12 +438,12 @@ func (r *mutationResolver) AddGalleryImages(ctx context.Context, input GalleryAd func (r *mutationResolver) RemoveGalleryImages(ctx context.Context, input GalleryRemoveInput) (bool, error) { galleryID, err := strconv.Atoi(input.GalleryID) if err != nil { - return false, err + return false, fmt.Errorf("converting gallery id: %w", err) } imageIDs, err := stringslice.StringSliceToIntSlice(input.ImageIds) if err != nil { - return false, err + return false, fmt.Errorf("converting image ids: %w", err) } if err := r.withTxn(ctx, func(ctx context.Context) error { @@ -501,14 +482,12 @@ func (r *mutationResolver) GalleryChapterCreate(ctx context.Context, input Galle return nil, fmt.Errorf("converting gallery id: %w", err) } - currentTime := 
time.Now() - newChapter := models.GalleryChapter{ - Title: input.Title, - ImageIndex: input.ImageIndex, - GalleryID: galleryID, - CreatedAt: currentTime, - UpdatedAt: currentTime, - } + // Populate a new gallery chapter from the input + newChapter := models.NewGalleryChapter() + + newChapter.Title = input.Title + newChapter.ImageIndex = input.ImageIndex + newChapter.GalleryID = galleryID // Start the transaction and save the gallery chapter if err := r.withTxn(ctx, func(ctx context.Context) error { @@ -534,7 +513,7 @@ func (r *mutationResolver) GalleryChapterCreate(ctx context.Context, input Galle func (r *mutationResolver) GalleryChapterUpdate(ctx context.Context, input GalleryChapterUpdateInput) (*models.GalleryChapter, error) { chapterID, err := strconv.Atoi(input.ID) if err != nil { - return nil, err + return nil, fmt.Errorf("converting id: %w", err) } translator := changesetTranslator{ @@ -600,7 +579,7 @@ func (r *mutationResolver) GalleryChapterUpdate(ctx context.Context, input Galle func (r *mutationResolver) GalleryChapterDestroy(ctx context.Context, id string) (bool, error) { chapterID, err := strconv.Atoi(id) if err != nil { - return false, err + return false, fmt.Errorf("converting id: %w", err) } if err := r.withTxn(ctx, func(ctx context.Context) error { diff --git a/internal/api/resolver_mutation_image.go b/internal/api/resolver_mutation_image.go index 6d5c3a88ab5..8b2cf447831 100644 --- a/internal/api/resolver_mutation_image.go +++ b/internal/api/resolver_mutation_image.go @@ -15,6 +15,7 @@ import ( "github.com/stashapp/stash/pkg/utils" ) +// used to refetch image after hooks run func (r *mutationResolver) getImage(ctx context.Context, id int) (ret *models.Image, err error) { if err := r.withTxn(ctx, func(ctx context.Context) error { ret, err = r.repository.Image.Find(ctx, id) @@ -75,6 +76,7 @@ func (r *mutationResolver) ImagesUpdate(ctx context.Context, input []*ImageUpdat } r.hookExecutor.ExecutePostHooks(ctx, image.ID, plugin.ImageUpdatePost, 
input, translator.getFields()) + image, err = r.getImage(ctx, image.ID) if err != nil { return nil, err @@ -89,7 +91,7 @@ func (r *mutationResolver) ImagesUpdate(ctx context.Context, input []*ImageUpdat func (r *mutationResolver) imageUpdate(ctx context.Context, input ImageUpdateInput, translator changesetTranslator) (*models.Image, error) { imageID, err := strconv.Atoi(input.ID) if err != nil { - return nil, err + return nil, fmt.Errorf("converting id: %w", err) } i, err := r.repository.Image.Find(ctx, imageID) @@ -105,8 +107,9 @@ func (r *mutationResolver) imageUpdate(ctx context.Context, input ImageUpdateInp updatedImage := models.NewImagePartial() updatedImage.Title = translator.optionalString(input.Title, "title") - updatedImage.Rating = translator.ratingConversionOptional(input.Rating, input.Rating100) - updatedImage.URL = translator.optionalString(input.URL, "url") + updatedImage.Rating = translator.optionalRatingConversion(input.Rating, input.Rating100) + updatedImage.Organized = translator.optionalBool(input.Organized, "organized") + updatedImage.Date, err = translator.optionalDate(input.Date, "date") if err != nil { return nil, fmt.Errorf("converting date: %w", err) @@ -115,42 +118,40 @@ func (r *mutationResolver) imageUpdate(ctx context.Context, input ImageUpdateInp if err != nil { return nil, fmt.Errorf("converting studio id: %w", err) } - updatedImage.Organized = translator.optionalBool(input.Organized, "organized") - if input.PrimaryFileID != nil { - primaryFileID, err := strconv.Atoi(*input.PrimaryFileID) - if err != nil { - return nil, fmt.Errorf("converting primary file id: %w", err) - } + updatedImage.URLs = translator.optionalURLs(input.Urls, input.URL) - converted := file.ID(primaryFileID) - updatedImage.PrimaryFileID = &converted + updatedImage.PrimaryFileID, err = translator.fileIDPtrFromString(input.PrimaryFileID) + if err != nil { + return nil, fmt.Errorf("converting primary file id: %w", err) + } + if updatedImage.PrimaryFileID != nil { + 
primaryFileID := *updatedImage.PrimaryFileID if err := i.LoadFiles(ctx, r.repository.Image); err != nil { return nil, err } // ensure that new primary file is associated with image - var f file.File + var f models.File for _, ff := range i.Files.List() { - if ff.Base().ID == converted { + if ff.Base().ID == primaryFileID { f = ff } } if f == nil { - return nil, fmt.Errorf("file with id %d not associated with image", converted) + return nil, fmt.Errorf("file with id %d not associated with image", primaryFileID) } } var updatedGalleryIDs []int - if translator.hasField("gallery_ids") { - updatedImage.GalleryIDs, err = translateUpdateIDs(input.GalleryIds, models.RelationshipUpdateModeSet) - if err != nil { - return nil, fmt.Errorf("converting gallery ids: %w", err) - } - + updatedImage.GalleryIDs, err = translator.updateIds(input.GalleryIds, "gallery_ids") + if err != nil { + return nil, fmt.Errorf("converting gallery ids: %w", err) + } + if updatedImage.GalleryIDs != nil { // ensure gallery IDs are loaded if err := i.LoadGalleryIDs(ctx, r.repository.Image); err != nil { return nil, err @@ -163,18 +164,13 @@ func (r *mutationResolver) imageUpdate(ctx context.Context, input ImageUpdateInp updatedGalleryIDs = updatedImage.GalleryIDs.ImpactedIDs(i.GalleryIDs.List()) } - if translator.hasField("performer_ids") { - updatedImage.PerformerIDs, err = translateUpdateIDs(input.PerformerIds, models.RelationshipUpdateModeSet) - if err != nil { - return nil, fmt.Errorf("converting performer ids: %w", err) - } + updatedImage.PerformerIDs, err = translator.updateIds(input.PerformerIds, "performer_ids") + if err != nil { + return nil, fmt.Errorf("converting performer ids: %w", err) } - - if translator.hasField("tag_ids") { - updatedImage.TagIDs, err = translateUpdateIDs(input.TagIds, models.RelationshipUpdateModeSet) - if err != nil { - return nil, fmt.Errorf("converting tag ids: %w", err) - } + updatedImage.TagIDs, err = translator.updateIds(input.TagIds, "tag_ids") + if err != nil { 
+ return nil, fmt.Errorf("converting tag ids: %w", err) } qb := r.repository.Image @@ -196,7 +192,7 @@ func (r *mutationResolver) imageUpdate(ctx context.Context, input ImageUpdateInp func (r *mutationResolver) BulkImageUpdate(ctx context.Context, input BulkImageUpdateInput) (ret []*models.Image, err error) { imageIDs, err := stringslice.StringSliceToIntSlice(input.Ids) if err != nil { - return nil, err + return nil, fmt.Errorf("converting ids: %w", err) } translator := changesetTranslator{ @@ -207,8 +203,9 @@ func (r *mutationResolver) BulkImageUpdate(ctx context.Context, input BulkImageU updatedImage := models.NewImagePartial() updatedImage.Title = translator.optionalString(input.Title, "title") - updatedImage.Rating = translator.ratingConversionOptional(input.Rating, input.Rating100) - updatedImage.URL = translator.optionalString(input.URL, "url") + updatedImage.Rating = translator.optionalRatingConversion(input.Rating, input.Rating100) + updatedImage.Organized = translator.optionalBool(input.Organized, "organized") + updatedImage.Date, err = translator.optionalDate(input.Date, "date") if err != nil { return nil, fmt.Errorf("converting date: %w", err) @@ -217,27 +214,20 @@ func (r *mutationResolver) BulkImageUpdate(ctx context.Context, input BulkImageU if err != nil { return nil, fmt.Errorf("converting studio id: %w", err) } - updatedImage.Organized = translator.optionalBool(input.Organized, "organized") - if translator.hasField("gallery_ids") { - updatedImage.GalleryIDs, err = translateUpdateIDs(input.GalleryIds.Ids, input.GalleryIds.Mode) - if err != nil { - return nil, fmt.Errorf("converting gallery ids: %w", err) - } - } + updatedImage.URLs = translator.optionalURLsBulk(input.Urls, input.URL) - if translator.hasField("performer_ids") { - updatedImage.PerformerIDs, err = translateUpdateIDs(input.PerformerIds.Ids, input.PerformerIds.Mode) - if err != nil { - return nil, fmt.Errorf("converting performer ids: %w", err) - } + updatedImage.GalleryIDs, err = 
translator.updateIdsBulk(input.GalleryIds, "gallery_ids") + if err != nil { + return nil, fmt.Errorf("converting gallery ids: %w", err) } - - if translator.hasField("tag_ids") { - updatedImage.TagIDs, err = translateUpdateIDs(input.TagIds.Ids, input.TagIds.Mode) - if err != nil { - return nil, fmt.Errorf("converting tag ids: %w", err) - } + updatedImage.PerformerIDs, err = translator.updateIdsBulk(input.PerformerIds, "performer_ids") + if err != nil { + return nil, fmt.Errorf("converting performer ids: %w", err) + } + updatedImage.TagIDs, err = translator.updateIdsBulk(input.TagIds, "tag_ids") + if err != nil { + return nil, fmt.Errorf("converting tag ids: %w", err) } // Start the transaction and save the images @@ -308,7 +298,7 @@ func (r *mutationResolver) BulkImageUpdate(ctx context.Context, input BulkImageU func (r *mutationResolver) ImageDestroy(ctx context.Context, input models.ImageDestroyInput) (ret bool, err error) { imageID, err := strconv.Atoi(input.ID) if err != nil { - return false, err + return false, fmt.Errorf("converting id: %w", err) } var i *models.Image @@ -348,7 +338,7 @@ func (r *mutationResolver) ImageDestroy(ctx context.Context, input models.ImageD func (r *mutationResolver) ImagesDestroy(ctx context.Context, input models.ImagesDestroyInput) (ret bool, err error) { imageIDs, err := stringslice.StringSliceToIntSlice(input.Ids) if err != nil { - return false, err + return false, fmt.Errorf("converting ids: %w", err) } var images []*models.Image @@ -400,7 +390,7 @@ func (r *mutationResolver) ImagesDestroy(ctx context.Context, input models.Image func (r *mutationResolver) ImageIncrementO(ctx context.Context, id string) (ret int, err error) { imageID, err := strconv.Atoi(id) if err != nil { - return 0, err + return 0, fmt.Errorf("converting id: %w", err) } if err := r.withTxn(ctx, func(ctx context.Context) error { @@ -418,7 +408,7 @@ func (r *mutationResolver) ImageIncrementO(ctx context.Context, id string) (ret func (r *mutationResolver) 
ImageDecrementO(ctx context.Context, id string) (ret int, err error) { imageID, err := strconv.Atoi(id) if err != nil { - return 0, err + return 0, fmt.Errorf("converting id: %w", err) } if err := r.withTxn(ctx, func(ctx context.Context) error { @@ -436,7 +426,7 @@ func (r *mutationResolver) ImageDecrementO(ctx context.Context, id string) (ret func (r *mutationResolver) ImageResetO(ctx context.Context, id string) (ret int, err error) { imageID, err := strconv.Atoi(id) if err != nil { - return 0, err + return 0, fmt.Errorf("converting id: %w", err) } if err := r.withTxn(ctx, func(ctx context.Context) error { diff --git a/internal/api/resolver_mutation_job.go b/internal/api/resolver_mutation_job.go index 5417468339d..74ced81d5f7 100644 --- a/internal/api/resolver_mutation_job.go +++ b/internal/api/resolver_mutation_job.go @@ -2,17 +2,18 @@ package api import ( "context" + "fmt" "strconv" "github.com/stashapp/stash/internal/manager" ) func (r *mutationResolver) StopJob(ctx context.Context, jobID string) (bool, error) { - idInt, err := strconv.Atoi(jobID) + id, err := strconv.Atoi(jobID) if err != nil { - return false, err + return false, fmt.Errorf("converting id: %w", err) } - manager.GetInstance().JobManager.CancelJob(idInt) + manager.GetInstance().JobManager.CancelJob(id) return true, nil } diff --git a/internal/api/resolver_mutation_movie.go b/internal/api/resolver_mutation_movie.go index b06d84a7f90..ef2d2405afe 100644 --- a/internal/api/resolver_mutation_movie.go +++ b/internal/api/resolver_mutation_movie.go @@ -4,7 +4,6 @@ import ( "context" "fmt" "strconv" - "time" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/plugin" @@ -12,6 +11,7 @@ import ( "github.com/stashapp/stash/pkg/utils" ) +// used to refetch movie after hooks run func (r *mutationResolver) getMovie(ctx context.Context, id int) (ret *models.Movie, err error) { if err := r.withTxn(ctx, func(ctx context.Context) error { ret, err = r.repository.Movie.Find(ctx, id) @@ -29,26 
+29,23 @@ func (r *mutationResolver) MovieCreate(ctx context.Context, input MovieCreateInp } // Populate a new movie from the input - currentTime := time.Now() - newMovie := models.Movie{ - Name: input.Name, - CreatedAt: currentTime, - UpdatedAt: currentTime, - Aliases: translator.string(input.Aliases, "aliases"), - Duration: input.Duration, - Rating: translator.ratingConversionInt(input.Rating, input.Rating100), - Director: translator.string(input.Director, "director"), - Synopsis: translator.string(input.Synopsis, "synopsis"), - URL: translator.string(input.URL, "url"), - } + newMovie := models.NewMovie() + + newMovie.Name = input.Name + newMovie.Aliases = translator.string(input.Aliases) + newMovie.Duration = input.Duration + newMovie.Rating = translator.ratingConversion(input.Rating, input.Rating100) + newMovie.Director = translator.string(input.Director) + newMovie.Synopsis = translator.string(input.Synopsis) + newMovie.URL = translator.string(input.URL) var err error - newMovie.Date, err = translator.datePtr(input.Date, "date") + newMovie.Date, err = translator.datePtr(input.Date) if err != nil { return nil, fmt.Errorf("converting date: %w", err) } - newMovie.StudioID, err = translator.intPtrFromString(input.StudioID, "studio_id") + newMovie.StudioID, err = translator.intPtrFromString(input.StudioID) if err != nil { return nil, fmt.Errorf("converting studio id: %w", err) } @@ -64,7 +61,7 @@ func (r *mutationResolver) MovieCreate(ctx context.Context, input MovieCreateInp if input.FrontImage != nil { frontimageData, err = utils.ProcessImageInput(ctx, *input.FrontImage) if err != nil { - return nil, err + return nil, fmt.Errorf("processing front image: %w", err) } } @@ -73,7 +70,7 @@ func (r *mutationResolver) MovieCreate(ctx context.Context, input MovieCreateInp if input.BackImage != nil { backimageData, err = utils.ProcessImageInput(ctx, *input.BackImage) if err != nil { - return nil, err + return nil, fmt.Errorf("processing back image: %w", err) } } @@ -111,7 
+108,7 @@ func (r *mutationResolver) MovieCreate(ctx context.Context, input MovieCreateInp func (r *mutationResolver) MovieUpdate(ctx context.Context, input MovieUpdateInput) (*models.Movie, error) { movieID, err := strconv.Atoi(input.ID) if err != nil { - return nil, err + return nil, fmt.Errorf("converting id: %w", err) } translator := changesetTranslator{ @@ -124,14 +121,15 @@ func (r *mutationResolver) MovieUpdate(ctx context.Context, input MovieUpdateInp updatedMovie.Name = translator.optionalString(input.Name, "name") updatedMovie.Aliases = translator.optionalString(input.Aliases, "aliases") updatedMovie.Duration = translator.optionalInt(input.Duration, "duration") + updatedMovie.Rating = translator.optionalRatingConversion(input.Rating, input.Rating100) + updatedMovie.Director = translator.optionalString(input.Director, "director") + updatedMovie.Synopsis = translator.optionalString(input.Synopsis, "synopsis") + updatedMovie.URL = translator.optionalString(input.URL, "url") + updatedMovie.Date, err = translator.optionalDate(input.Date, "date") if err != nil { return nil, fmt.Errorf("converting date: %w", err) } - updatedMovie.Rating = translator.ratingConversionOptional(input.Rating, input.Rating100) - updatedMovie.Director = translator.optionalString(input.Director, "director") - updatedMovie.Synopsis = translator.optionalString(input.Synopsis, "synopsis") - updatedMovie.URL = translator.optionalString(input.URL, "url") updatedMovie.StudioID, err = translator.optionalIntFromString(input.StudioID, "studio_id") if err != nil { return nil, fmt.Errorf("converting studio id: %w", err) @@ -142,7 +140,7 @@ func (r *mutationResolver) MovieUpdate(ctx context.Context, input MovieUpdateInp if input.FrontImage != nil { frontimageData, err = utils.ProcessImageInput(ctx, *input.FrontImage) if err != nil { - return nil, err + return nil, fmt.Errorf("processing front image: %w", err) } } @@ -151,7 +149,7 @@ func (r *mutationResolver) MovieUpdate(ctx context.Context, input 
MovieUpdateInp if input.BackImage != nil { backimageData, err = utils.ProcessImageInput(ctx, *input.BackImage) if err != nil { - return nil, err + return nil, fmt.Errorf("processing back image: %w", err) } } @@ -189,18 +187,19 @@ func (r *mutationResolver) MovieUpdate(ctx context.Context, input MovieUpdateInp func (r *mutationResolver) BulkMovieUpdate(ctx context.Context, input BulkMovieUpdateInput) ([]*models.Movie, error) { movieIDs, err := stringslice.StringSliceToIntSlice(input.Ids) if err != nil { - return nil, err + return nil, fmt.Errorf("converting ids: %w", err) } translator := changesetTranslator{ inputMap: getUpdateInputMap(ctx), } - // populate movie from the input + // Populate movie from the input updatedMovie := models.NewMoviePartial() - updatedMovie.Rating = translator.ratingConversionOptional(input.Rating, input.Rating100) + updatedMovie.Rating = translator.optionalRatingConversion(input.Rating, input.Rating100) updatedMovie.Director = translator.optionalString(input.Director, "director") + updatedMovie.StudioID, err = translator.optionalIntFromString(input.StudioID, "studio_id") if err != nil { return nil, fmt.Errorf("converting studio id: %w", err) @@ -243,7 +242,7 @@ func (r *mutationResolver) BulkMovieUpdate(ctx context.Context, input BulkMovieU func (r *mutationResolver) MovieDestroy(ctx context.Context, input MovieDestroyInput) (bool, error) { id, err := strconv.Atoi(input.ID) if err != nil { - return false, err + return false, fmt.Errorf("converting id: %w", err) } if err := r.withTxn(ctx, func(ctx context.Context) error { @@ -260,7 +259,7 @@ func (r *mutationResolver) MovieDestroy(ctx context.Context, input MovieDestroyI func (r *mutationResolver) MoviesDestroy(ctx context.Context, movieIDs []string) (bool, error) { ids, err := stringslice.StringSliceToIntSlice(movieIDs) if err != nil { - return false, err + return false, fmt.Errorf("converting ids: %w", err) } if err := r.withTxn(ctx, func(ctx context.Context) error { diff --git 
a/internal/api/resolver_mutation_performer.go b/internal/api/resolver_mutation_performer.go index 2c23f063a2b..9e40e7a01bf 100644 --- a/internal/api/resolver_mutation_performer.go +++ b/internal/api/resolver_mutation_performer.go @@ -4,7 +4,6 @@ import ( "context" "fmt" "strconv" - "time" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/performer" @@ -13,6 +12,7 @@ import ( "github.com/stashapp/stash/pkg/utils" ) +// used to refetch performer after hooks run func (r *mutationResolver) getPerformer(ctx context.Context, id int) (ret *models.Performer, err error) { if err := r.withTxn(ctx, func(ctx context.Context) error { ret, err = r.repository.Performer.Find(ctx, id) @@ -24,62 +24,45 @@ func (r *mutationResolver) getPerformer(ctx context.Context, id int) (ret *model return ret, nil } -func stashIDPtrSliceToSlice(v []*models.StashID) []models.StashID { - ret := make([]models.StashID, len(v)) - for i, vv := range v { - c := vv - ret[i] = *c - } - - return ret -} - -func (r *mutationResolver) PerformerCreate(ctx context.Context, input PerformerCreateInput) (*models.Performer, error) { +func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.PerformerCreateInput) (*models.Performer, error) { translator := changesetTranslator{ inputMap: getUpdateInputMap(ctx), } - tagIDs, err := stringslice.StringSliceToIntSlice(input.TagIds) - if err != nil { - return nil, fmt.Errorf("converting tag ids: %w", err) - } - // Populate a new performer from the input - currentTime := time.Now() - newPerformer := models.Performer{ - Name: input.Name, - Disambiguation: translator.string(input.Disambiguation, "disambiguation"), - URL: translator.string(input.URL, "url"), - Gender: input.Gender, - Ethnicity: translator.string(input.Ethnicity, "ethnicity"), - Country: translator.string(input.Country, "country"), - EyeColor: translator.string(input.EyeColor, "eye_color"), - Measurements: translator.string(input.Measurements, "measurements"), - FakeTits: 
translator.string(input.FakeTits, "fake_tits"), - PenisLength: input.PenisLength, - Circumcised: input.Circumcised, - CareerLength: translator.string(input.CareerLength, "career_length"), - Tattoos: translator.string(input.Tattoos, "tattoos"), - Piercings: translator.string(input.Piercings, "piercings"), - Twitter: translator.string(input.Twitter, "twitter"), - Instagram: translator.string(input.Instagram, "instagram"), - Favorite: translator.bool(input.Favorite, "favorite"), - Rating: translator.ratingConversionInt(input.Rating, input.Rating100), - Details: translator.string(input.Details, "details"), - HairColor: translator.string(input.HairColor, "hair_color"), - Weight: input.Weight, - IgnoreAutoTag: translator.bool(input.IgnoreAutoTag, "ignore_auto_tag"), - CreatedAt: currentTime, - UpdatedAt: currentTime, - TagIDs: models.NewRelatedIDs(tagIDs), - StashIDs: models.NewRelatedStashIDs(stashIDPtrSliceToSlice(input.StashIds)), - } - - newPerformer.Birthdate, err = translator.datePtr(input.Birthdate, "birthdate") + newPerformer := models.NewPerformer() + + newPerformer.Name = input.Name + newPerformer.Disambiguation = translator.string(input.Disambiguation) + newPerformer.URL = translator.string(input.URL) + newPerformer.Gender = input.Gender + newPerformer.Ethnicity = translator.string(input.Ethnicity) + newPerformer.Country = translator.string(input.Country) + newPerformer.EyeColor = translator.string(input.EyeColor) + newPerformer.Measurements = translator.string(input.Measurements) + newPerformer.FakeTits = translator.string(input.FakeTits) + newPerformer.PenisLength = input.PenisLength + newPerformer.Circumcised = input.Circumcised + newPerformer.CareerLength = translator.string(input.CareerLength) + newPerformer.Tattoos = translator.string(input.Tattoos) + newPerformer.Piercings = translator.string(input.Piercings) + newPerformer.Twitter = translator.string(input.Twitter) + newPerformer.Instagram = translator.string(input.Instagram) + newPerformer.Favorite = 
translator.bool(input.Favorite) + newPerformer.Rating = translator.ratingConversion(input.Rating, input.Rating100) + newPerformer.Details = translator.string(input.Details) + newPerformer.HairColor = translator.string(input.HairColor) + newPerformer.Weight = input.Weight + newPerformer.IgnoreAutoTag = translator.bool(input.IgnoreAutoTag) + newPerformer.StashIDs = models.NewRelatedStashIDs(input.StashIds) + + var err error + + newPerformer.Birthdate, err = translator.datePtr(input.Birthdate) if err != nil { return nil, fmt.Errorf("converting birthdate: %w", err) } - newPerformer.DeathDate, err = translator.datePtr(input.DeathDate, "death_date") + newPerformer.DeathDate, err = translator.datePtr(input.DeathDate) if err != nil { return nil, fmt.Errorf("converting death date: %w", err) } @@ -88,18 +71,24 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input PerformerC if input.HeightCm != nil { newPerformer.Height = input.HeightCm } else { - newPerformer.Height, err = translator.intPtrFromString(input.Height, "height") + newPerformer.Height, err = translator.intPtrFromString(input.Height) if err != nil { return nil, fmt.Errorf("converting height: %w", err) } } + // prefer alias_list over aliases if input.AliasList != nil { newPerformer.Aliases = models.NewRelatedStrings(input.AliasList) } else if input.Aliases != nil { newPerformer.Aliases = models.NewRelatedStrings(stringslice.FromString(*input.Aliases, ",")) } + newPerformer.TagIDs, err = translator.relatedIds(input.TagIds) + if err != nil { + return nil, fmt.Errorf("converting tag ids: %w", err) + } + if err := performer.ValidateDeathDate(nil, input.Birthdate, input.DeathDate); err != nil { if err != nil { return nil, err @@ -111,7 +100,7 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input PerformerC if input.Image != nil { imageData, err = utils.ProcessImageInput(ctx, *input.Image) if err != nil { - return nil, err + return nil, fmt.Errorf("processing image: %w", err) } } @@ 
-140,42 +129,27 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input PerformerC return r.getPerformer(ctx, newPerformer.ID) } -func (r *mutationResolver) PerformerUpdate(ctx context.Context, input PerformerUpdateInput) (*models.Performer, error) { +func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.PerformerUpdateInput) (*models.Performer, error) { performerID, err := strconv.Atoi(input.ID) if err != nil { - return nil, err + return nil, fmt.Errorf("converting id: %w", err) } - // Populate performer from the input translator := changesetTranslator{ inputMap: getUpdateInputMap(ctx), } + // Populate performer from the input updatedPerformer := models.NewPerformerPartial() updatedPerformer.Name = translator.optionalString(input.Name, "name") updatedPerformer.Disambiguation = translator.optionalString(input.Disambiguation, "disambiguation") updatedPerformer.URL = translator.optionalString(input.URL, "url") updatedPerformer.Gender = translator.optionalString((*string)(input.Gender), "gender") - updatedPerformer.Birthdate, err = translator.optionalDate(input.Birthdate, "birthdate") - if err != nil { - return nil, fmt.Errorf("converting birthdate: %w", err) - } updatedPerformer.Ethnicity = translator.optionalString(input.Ethnicity, "ethnicity") updatedPerformer.Country = translator.optionalString(input.Country, "country") updatedPerformer.EyeColor = translator.optionalString(input.EyeColor, "eye_color") updatedPerformer.Measurements = translator.optionalString(input.Measurements, "measurements") - - // prefer height_cm over height - if translator.hasField("height_cm") { - updatedPerformer.Height = translator.optionalInt(input.HeightCm, "height_cm") - } else if translator.hasField("height") { - updatedPerformer.Height, err = translator.optionalIntFromString(input.Height, "height") - if err != nil { - return nil, err - } - } - updatedPerformer.FakeTits = translator.optionalString(input.FakeTits, "fake_tits") 
updatedPerformer.PenisLength = translator.optionalFloat64(input.PenisLength, "penis_length") updatedPerformer.Circumcised = translator.optionalString((*string)(input.Circumcised), "circumcised") @@ -185,41 +159,46 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input PerformerU updatedPerformer.Twitter = translator.optionalString(input.Twitter, "twitter") updatedPerformer.Instagram = translator.optionalString(input.Instagram, "instagram") updatedPerformer.Favorite = translator.optionalBool(input.Favorite, "favorite") - updatedPerformer.Rating = translator.ratingConversionOptional(input.Rating, input.Rating100) + updatedPerformer.Rating = translator.optionalRatingConversion(input.Rating, input.Rating100) updatedPerformer.Details = translator.optionalString(input.Details, "details") - updatedPerformer.DeathDate, err = translator.optionalDate(input.DeathDate, "death_date") - if err != nil { - return nil, fmt.Errorf("converting death date: %w", err) - } updatedPerformer.HairColor = translator.optionalString(input.HairColor, "hair_color") updatedPerformer.Weight = translator.optionalInt(input.Weight, "weight") updatedPerformer.IgnoreAutoTag = translator.optionalBool(input.IgnoreAutoTag, "ignore_auto_tag") + updatedPerformer.StashIDs = translator.updateStashIDs(input.StashIds, "stash_ids") - if translator.hasField("alias_list") { - updatedPerformer.Aliases = &models.UpdateStrings{ - Values: input.AliasList, - Mode: models.RelationshipUpdateModeSet, - } - } else if translator.hasField("aliases") { - updatedPerformer.Aliases = &models.UpdateStrings{ - Values: stringslice.FromString(*input.Aliases, ","), - Mode: models.RelationshipUpdateModeSet, - } + updatedPerformer.Birthdate, err = translator.optionalDate(input.Birthdate, "birthdate") + if err != nil { + return nil, fmt.Errorf("converting birthdate: %w", err) + } + updatedPerformer.DeathDate, err = translator.optionalDate(input.DeathDate, "death_date") + if err != nil { + return nil, 
fmt.Errorf("converting death date: %w", err) } - if translator.hasField("tag_ids") { - updatedPerformer.TagIDs, err = translateUpdateIDs(input.TagIds, models.RelationshipUpdateModeSet) + // prefer height_cm over height + if translator.hasField("height_cm") { + updatedPerformer.Height = translator.optionalInt(input.HeightCm, "height_cm") + } else if translator.hasField("height") { + updatedPerformer.Height, err = translator.optionalIntFromString(input.Height, "height") if err != nil { - return nil, fmt.Errorf("converting tag ids: %w", err) + return nil, fmt.Errorf("converting height: %w", err) } } - // Save the stash_ids - if translator.hasField("stash_ids") { - updatedPerformer.StashIDs = &models.UpdateStashIDs{ - StashIDs: stashIDPtrSliceToSlice(input.StashIds), - Mode: models.RelationshipUpdateModeSet, + // prefer alias_list over aliases + if translator.hasField("alias_list") { + updatedPerformer.Aliases = translator.updateStrings(input.AliasList, "alias_list") + } else if translator.hasField("aliases") { + var aliasList []string + if input.Aliases != nil { + aliasList = stringslice.FromString(*input.Aliases, ",") } + updatedPerformer.Aliases = translator.updateStrings(aliasList, "aliases") + } + + updatedPerformer.TagIDs, err = translator.updateIds(input.TagIds, "tag_ids") + if err != nil { + return nil, fmt.Errorf("converting tag ids: %w", err) } var imageData []byte @@ -227,7 +206,7 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input PerformerU if input.Image != nil { imageData, err = utils.ProcessImageInput(ctx, *input.Image) if err != nil { - return nil, err + return nil, fmt.Errorf("processing image: %w", err) } } @@ -246,9 +225,7 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input PerformerU } if err := performer.ValidateDeathDate(existing, input.Birthdate, input.DeathDate); err != nil { - if err != nil { - return err - } + return err } _, err = qb.UpdatePartial(ctx, performerID, updatedPerformer) @@ -275,37 
+252,22 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input PerformerU func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input BulkPerformerUpdateInput) ([]*models.Performer, error) { performerIDs, err := stringslice.StringSliceToIntSlice(input.Ids) if err != nil { - return nil, err + return nil, fmt.Errorf("converting ids: %w", err) } - // Populate performer from the input translator := changesetTranslator{ inputMap: getUpdateInputMap(ctx), } + // Populate performer from the input updatedPerformer := models.NewPerformerPartial() updatedPerformer.Disambiguation = translator.optionalString(input.Disambiguation, "disambiguation") updatedPerformer.URL = translator.optionalString(input.URL, "url") updatedPerformer.Gender = translator.optionalString((*string)(input.Gender), "gender") - updatedPerformer.Birthdate, err = translator.optionalDate(input.Birthdate, "birthdate") - if err != nil { - return nil, fmt.Errorf("converting birthdate: %w", err) - } updatedPerformer.Ethnicity = translator.optionalString(input.Ethnicity, "ethnicity") updatedPerformer.Country = translator.optionalString(input.Country, "country") updatedPerformer.EyeColor = translator.optionalString(input.EyeColor, "eye_color") - - // prefer height_cm over height - if translator.hasField("height_cm") { - updatedPerformer.Height = translator.optionalInt(input.HeightCm, "height_cm") - } else if translator.hasField("height") { - updatedPerformer.Height, err = translator.optionalIntFromString(input.Height, "height") - if err != nil { - return nil, err - } - } - updatedPerformer.Measurements = translator.optionalString(input.Measurements, "measurements") updatedPerformer.FakeTits = translator.optionalString(input.FakeTits, "fake_tits") updatedPerformer.PenisLength = translator.optionalFloat64(input.PenisLength, "penis_length") @@ -316,33 +278,45 @@ func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input BulkPe updatedPerformer.Twitter = 
translator.optionalString(input.Twitter, "twitter") updatedPerformer.Instagram = translator.optionalString(input.Instagram, "instagram") updatedPerformer.Favorite = translator.optionalBool(input.Favorite, "favorite") - updatedPerformer.Rating = translator.ratingConversionOptional(input.Rating, input.Rating100) + updatedPerformer.Rating = translator.optionalRatingConversion(input.Rating, input.Rating100) updatedPerformer.Details = translator.optionalString(input.Details, "details") + updatedPerformer.HairColor = translator.optionalString(input.HairColor, "hair_color") + updatedPerformer.Weight = translator.optionalInt(input.Weight, "weight") + updatedPerformer.IgnoreAutoTag = translator.optionalBool(input.IgnoreAutoTag, "ignore_auto_tag") + + updatedPerformer.Birthdate, err = translator.optionalDate(input.Birthdate, "birthdate") + if err != nil { + return nil, fmt.Errorf("converting birthdate: %w", err) + } updatedPerformer.DeathDate, err = translator.optionalDate(input.DeathDate, "death_date") if err != nil { return nil, fmt.Errorf("converting death date: %w", err) } - updatedPerformer.HairColor = translator.optionalString(input.HairColor, "hair_color") - updatedPerformer.Weight = translator.optionalInt(input.Weight, "weight") - updatedPerformer.IgnoreAutoTag = translator.optionalBool(input.IgnoreAutoTag, "ignore_auto_tag") - if translator.hasField("alias_list") { - updatedPerformer.Aliases = &models.UpdateStrings{ - Values: input.AliasList.Values, - Mode: input.AliasList.Mode, + // prefer height_cm over height + if translator.hasField("height_cm") { + updatedPerformer.Height = translator.optionalInt(input.HeightCm, "height_cm") + } else if translator.hasField("height") { + updatedPerformer.Height, err = translator.optionalIntFromString(input.Height, "height") + if err != nil { + return nil, fmt.Errorf("converting height: %w", err) } + } + + // prefer alias_list over aliases + if translator.hasField("alias_list") { + updatedPerformer.Aliases = 
translator.updateStringsBulk(input.AliasList, "alias_list") } else if translator.hasField("aliases") { - updatedPerformer.Aliases = &models.UpdateStrings{ - Values: stringslice.FromString(*input.Aliases, ","), - Mode: models.RelationshipUpdateModeSet, + var aliasList []string + if input.Aliases != nil { + aliasList = stringslice.FromString(*input.Aliases, ",") } + updatedPerformer.Aliases = translator.updateStrings(aliasList, "aliases") } - if translator.hasField("tag_ids") { - updatedPerformer.TagIDs, err = translateUpdateIDs(input.TagIds.Ids, input.TagIds.Mode) - if err != nil { - return nil, fmt.Errorf("converting tag ids: %w", err) - } + updatedPerformer.TagIDs, err = translator.updateIdsBulk(input.TagIds, "tag_ids") + if err != nil { + return nil, fmt.Errorf("converting tag ids: %w", err) } ret := []*models.Performer{} @@ -362,7 +336,8 @@ func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input BulkPe return fmt.Errorf("performer with id %d not found", performerID) } - if err := performer.ValidateDeathDate(existing, input.Birthdate, input.DeathDate); err != nil { + err = performer.ValidateDeathDate(existing, input.Birthdate, input.DeathDate) + if err != nil { return err } @@ -398,7 +373,7 @@ func (r *mutationResolver) BulkPerformerUpdate(ctx context.Context, input BulkPe func (r *mutationResolver) PerformerDestroy(ctx context.Context, input PerformerDestroyInput) (bool, error) { id, err := strconv.Atoi(input.ID) if err != nil { - return false, err + return false, fmt.Errorf("converting id: %w", err) } if err := r.withTxn(ctx, func(ctx context.Context) error { @@ -415,7 +390,7 @@ func (r *mutationResolver) PerformerDestroy(ctx context.Context, input Performer func (r *mutationResolver) PerformersDestroy(ctx context.Context, performerIDs []string) (bool, error) { ids, err := stringslice.StringSliceToIntSlice(performerIDs) if err != nil { - return false, err + return false, fmt.Errorf("converting ids: %w", err) } if err := r.withTxn(ctx, func(ctx 
context.Context) error { diff --git a/internal/api/resolver_mutation_saved_filter.go b/internal/api/resolver_mutation_saved_filter.go index a0514546cf2..13b5d87fafa 100644 --- a/internal/api/resolver_mutation_saved_filter.go +++ b/internal/api/resolver_mutation_saved_filter.go @@ -3,6 +3,7 @@ package api import ( "context" "errors" + "fmt" "strconv" "strings" @@ -14,17 +15,11 @@ func (r *mutationResolver) SaveFilter(ctx context.Context, input SaveFilterInput return nil, errors.New("name must be non-empty") } - newFilter := models.SavedFilter{ - Mode: input.Mode, - Name: input.Name, - Filter: input.Filter, - } - var id *int if input.ID != nil { idv, err := strconv.Atoi(*input.ID) if err != nil { - return nil, err + return nil, fmt.Errorf("converting id: %w", err) } id = &idv } @@ -32,24 +27,34 @@ func (r *mutationResolver) SaveFilter(ctx context.Context, input SaveFilterInput if err := r.withTxn(ctx, func(ctx context.Context) error { qb := r.repository.SavedFilter + f := models.SavedFilter{ + Mode: input.Mode, + Name: input.Name, + FindFilter: input.FindFilter, + ObjectFilter: input.ObjectFilter, + UIOptions: input.UIOptions, + } + if id == nil { - err = qb.Create(ctx, &newFilter) + err = qb.Create(ctx, &f) + ret = &f } else { - newFilter.ID = *id - err = qb.Update(ctx, &newFilter) + f.ID = *id + err = qb.Update(ctx, &f) + ret = &f } + return err }); err != nil { return nil, err } - ret = &newFilter return ret, err } func (r *mutationResolver) DestroySavedFilter(ctx context.Context, input DestroyFilterInput) (bool, error) { id, err := strconv.Atoi(input.ID) if err != nil { - return false, err + return false, fmt.Errorf("converting id: %w", err) } if err := r.withTxn(ctx, func(ctx context.Context) error { @@ -65,7 +70,7 @@ func (r *mutationResolver) SetDefaultFilter(ctx context.Context, input SetDefaul if err := r.withTxn(ctx, func(ctx context.Context) error { qb := r.repository.SavedFilter - if input.Filter == nil { + if input.FindFilter == nil && input.ObjectFilter 
== nil && input.UIOptions == nil { // clearing def, err := qb.FindDefault(ctx, input.Mode) if err != nil { @@ -79,12 +84,12 @@ func (r *mutationResolver) SetDefaultFilter(ctx context.Context, input SetDefaul return nil } - err := qb.SetDefault(ctx, &models.SavedFilter{ - Mode: input.Mode, - Filter: *input.Filter, + return qb.SetDefault(ctx, &models.SavedFilter{ + Mode: input.Mode, + FindFilter: input.FindFilter, + ObjectFilter: input.ObjectFilter, + UIOptions: input.UIOptions, }) - - return err }); err != nil { return false, err } diff --git a/internal/api/resolver_mutation_scene.go b/internal/api/resolver_mutation_scene.go index 5dd929412ae..c7bc876dc7a 100644 --- a/internal/api/resolver_mutation_scene.go +++ b/internal/api/resolver_mutation_scene.go @@ -5,7 +5,6 @@ import ( "errors" "fmt" "strconv" - "time" "github.com/stashapp/stash/internal/manager" "github.com/stashapp/stash/pkg/file" @@ -17,6 +16,7 @@ import ( "github.com/stashapp/stash/pkg/utils" ) +// used to refetch scene after hooks run func (r *mutationResolver) getScene(ctx context.Context, id int) (ret *models.Scene, err error) { if err := r.withTxn(ctx, func(ctx context.Context) error { ret, err = r.repository.Scene.Find(ctx, id) @@ -28,59 +28,32 @@ func (r *mutationResolver) getScene(ctx context.Context, id int) (ret *models.Sc return ret, nil } -func (r *mutationResolver) SceneCreate(ctx context.Context, input SceneCreateInput) (ret *models.Scene, err error) { +func (r *mutationResolver) SceneCreate(ctx context.Context, input models.SceneCreateInput) (ret *models.Scene, err error) { translator := changesetTranslator{ inputMap: getUpdateInputMap(ctx), } - performerIDs, err := stringslice.StringSliceToIntSlice(input.PerformerIds) - if err != nil { - return nil, fmt.Errorf("converting performer ids: %w", err) - } - tagIDs, err := stringslice.StringSliceToIntSlice(input.TagIds) - if err != nil { - return nil, fmt.Errorf("converting tag ids: %w", err) - } - galleryIDs, err := 
stringslice.StringSliceToIntSlice(input.GalleryIds) - if err != nil { - return nil, fmt.Errorf("converting gallery ids: %w", err) - } - - moviesScenes, err := models.MoviesScenesFromInput(input.Movies) - if err != nil { - return nil, fmt.Errorf("converting movies scenes: %w", err) - } - - fileIDsInt, err := stringslice.StringSliceToIntSlice(input.FileIds) + fileIDs, err := translator.fileIDSliceFromStringSlice(input.FileIds) if err != nil { return nil, fmt.Errorf("converting file ids: %w", err) } - fileIDs := make([]file.ID, len(fileIDsInt)) - for i, v := range fileIDsInt { - fileIDs[i] = file.ID(v) - } - // Populate a new scene from the input - newScene := models.Scene{ - Title: translator.string(input.Title, "title"), - Code: translator.string(input.Code, "code"), - Details: translator.string(input.Details, "details"), - Director: translator.string(input.Director, "director"), - Rating: translator.ratingConversionInt(input.Rating, input.Rating100), - Organized: translator.bool(input.Organized, "organized"), - PerformerIDs: models.NewRelatedIDs(performerIDs), - TagIDs: models.NewRelatedIDs(tagIDs), - GalleryIDs: models.NewRelatedIDs(galleryIDs), - Movies: models.NewRelatedMovies(moviesScenes), - StashIDs: models.NewRelatedStashIDs(stashIDPtrSliceToSlice(input.StashIds)), - } - - newScene.Date, err = translator.datePtr(input.Date, "date") + newScene := models.NewScene() + + newScene.Title = translator.string(input.Title) + newScene.Code = translator.string(input.Code) + newScene.Details = translator.string(input.Details) + newScene.Director = translator.string(input.Director) + newScene.Rating = translator.ratingConversion(input.Rating, input.Rating100) + newScene.Organized = translator.bool(input.Organized) + newScene.StashIDs = models.NewRelatedStashIDs(input.StashIds) + + newScene.Date, err = translator.datePtr(input.Date) if err != nil { return nil, fmt.Errorf("converting date: %w", err) } - newScene.StudioID, err = translator.intPtrFromString(input.StudioID, 
"studio_id") + newScene.StudioID, err = translator.intPtrFromString(input.StudioID) if err != nil { return nil, fmt.Errorf("converting studio id: %w", err) } @@ -91,12 +64,30 @@ func (r *mutationResolver) SceneCreate(ctx context.Context, input SceneCreateInp newScene.URLs = models.NewRelatedStrings([]string{*input.URL}) } + newScene.PerformerIDs, err = translator.relatedIds(input.PerformerIds) + if err != nil { + return nil, fmt.Errorf("converting performer ids: %w", err) + } + newScene.TagIDs, err = translator.relatedIds(input.TagIds) + if err != nil { + return nil, fmt.Errorf("converting tag ids: %w", err) + } + newScene.GalleryIDs, err = translator.relatedIds(input.GalleryIds) + if err != nil { + return nil, fmt.Errorf("converting gallery ids: %w", err) + } + + newScene.Movies, err = translator.relatedMovies(input.Movies) + if err != nil { + return nil, fmt.Errorf("converting movies: %w", err) + } + var coverImageData []byte if input.CoverImage != nil { var err error coverImageData, err = utils.ProcessImageInput(ctx, *input.CoverImage) if err != nil { - return nil, err + return nil, fmt.Errorf("processing cover image: %w", err) } } @@ -173,84 +164,51 @@ func (r *mutationResolver) ScenesUpdate(ctx context.Context, input []*models.Sce func scenePartialFromInput(input models.SceneUpdateInput, translator changesetTranslator) (*models.ScenePartial, error) { updatedScene := models.NewScenePartial() - var err error - updatedScene.Title = translator.optionalString(input.Title, "title") updatedScene.Code = translator.optionalString(input.Code, "code") updatedScene.Details = translator.optionalString(input.Details, "details") updatedScene.Director = translator.optionalString(input.Director, "director") + updatedScene.Rating = translator.optionalRatingConversion(input.Rating, input.Rating100) + updatedScene.OCounter = translator.optionalInt(input.OCounter, "o_counter") + updatedScene.PlayCount = translator.optionalInt(input.PlayCount, "play_count") + 
updatedScene.PlayDuration = translator.optionalFloat64(input.PlayDuration, "play_duration") + updatedScene.Organized = translator.optionalBool(input.Organized, "organized") + updatedScene.StashIDs = translator.updateStashIDs(input.StashIds, "stash_ids") + + var err error + updatedScene.Date, err = translator.optionalDate(input.Date, "date") if err != nil { return nil, fmt.Errorf("converting date: %w", err) } - updatedScene.Rating = translator.ratingConversionOptional(input.Rating, input.Rating100) - updatedScene.OCounter = translator.optionalInt(input.OCounter, "o_counter") - updatedScene.PlayCount = translator.optionalInt(input.PlayCount, "play_count") - updatedScene.PlayDuration = translator.optionalFloat64(input.PlayDuration, "play_duration") updatedScene.StudioID, err = translator.optionalIntFromString(input.StudioID, "studio_id") if err != nil { return nil, fmt.Errorf("converting studio id: %w", err) } - updatedScene.Organized = translator.optionalBool(input.Organized, "organized") - - if translator.hasField("urls") { - updatedScene.URLs = &models.UpdateStrings{ - Values: input.Urls, - Mode: models.RelationshipUpdateModeSet, - } - } else if translator.hasField("url") { - updatedScene.URLs = &models.UpdateStrings{ - Values: []string{*input.URL}, - Mode: models.RelationshipUpdateModeSet, - } - } + updatedScene.URLs = translator.optionalURLs(input.Urls, input.URL) - if input.PrimaryFileID != nil { - primaryFileID, err := strconv.Atoi(*input.PrimaryFileID) - if err != nil { - return nil, fmt.Errorf("converting primary file id: %w", err) - } - - converted := file.ID(primaryFileID) - updatedScene.PrimaryFileID = &converted - } - - if translator.hasField("performer_ids") { - updatedScene.PerformerIDs, err = translateUpdateIDs(input.PerformerIds, models.RelationshipUpdateModeSet) - if err != nil { - return nil, fmt.Errorf("converting performer ids: %w", err) - } + updatedScene.PrimaryFileID, err = translator.fileIDPtrFromString(input.PrimaryFileID) + if err != nil { + 
return nil, fmt.Errorf("converting primary file id: %w", err) } - if translator.hasField("tag_ids") { - updatedScene.TagIDs, err = translateUpdateIDs(input.TagIds, models.RelationshipUpdateModeSet) - if err != nil { - return nil, fmt.Errorf("converting tag ids: %w", err) - } + updatedScene.PerformerIDs, err = translator.updateIds(input.PerformerIds, "performer_ids") + if err != nil { + return nil, fmt.Errorf("converting performer ids: %w", err) } - - if translator.hasField("gallery_ids") { - updatedScene.GalleryIDs, err = translateUpdateIDs(input.GalleryIds, models.RelationshipUpdateModeSet) - if err != nil { - return nil, fmt.Errorf("converting gallery ids: %w", err) - } + updatedScene.TagIDs, err = translator.updateIds(input.TagIds, "tag_ids") + if err != nil { + return nil, fmt.Errorf("converting tag ids: %w", err) } - - // Save the movies - if translator.hasField("movies") { - updatedScene.MovieIDs, err = models.UpdateMovieIDsFromInput(input.Movies) - if err != nil { - return nil, fmt.Errorf("converting movie ids: %w", err) - } + updatedScene.GalleryIDs, err = translator.updateIds(input.GalleryIds, "gallery_ids") + if err != nil { + return nil, fmt.Errorf("converting gallery ids: %w", err) } - // Save the stash_ids - if translator.hasField("stash_ids") { - updatedScene.StashIDs = &models.UpdateStashIDs{ - StashIDs: input.StashIds, - Mode: models.RelationshipUpdateModeSet, - } + updatedScene.MovieIDs, err = translator.updateMovieIDs(input.Movies, "movies") + if err != nil { + return nil, fmt.Errorf("converting movies: %w", err) } return &updatedScene, nil @@ -259,7 +217,7 @@ func scenePartialFromInput(input models.SceneUpdateInput, translator changesetTr func (r *mutationResolver) sceneUpdate(ctx context.Context, input models.SceneUpdateInput, translator changesetTranslator) (*models.Scene, error) { sceneID, err := strconv.Atoi(input.ID) if err != nil { - return nil, err + return nil, fmt.Errorf("converting id: %w", err) } qb := r.repository.Scene @@ -300,7 
+258,7 @@ func (r *mutationResolver) sceneUpdate(ctx context.Context, input models.SceneUp } // ensure that new primary file is associated with scene - var f *file.VideoFile + var f *models.VideoFile for _, ff := range originalScene.Files.List() { if ff.ID == newPrimaryFileID { f = ff @@ -317,7 +275,7 @@ func (r *mutationResolver) sceneUpdate(ctx context.Context, input models.SceneUp var err error coverImageData, err = utils.ProcessImageInput(ctx, *input.CoverImage) if err != nil { - return nil, err + return nil, fmt.Errorf("processing cover image: %w", err) } } @@ -349,7 +307,7 @@ func (r *mutationResolver) sceneUpdateCoverImage(ctx context.Context, s *models. func (r *mutationResolver) BulkSceneUpdate(ctx context.Context, input BulkSceneUpdateInput) ([]*models.Scene, error) { sceneIDs, err := stringslice.StringSliceToIntSlice(input.Ids) if err != nil { - return nil, err + return nil, fmt.Errorf("converting ids: %w", err) } translator := changesetTranslator{ @@ -363,57 +321,36 @@ func (r *mutationResolver) BulkSceneUpdate(ctx context.Context, input BulkSceneU updatedScene.Code = translator.optionalString(input.Code, "code") updatedScene.Details = translator.optionalString(input.Details, "details") updatedScene.Director = translator.optionalString(input.Director, "director") + updatedScene.Rating = translator.optionalRatingConversion(input.Rating, input.Rating100) + updatedScene.Organized = translator.optionalBool(input.Organized, "organized") + updatedScene.Date, err = translator.optionalDate(input.Date, "date") if err != nil { return nil, fmt.Errorf("converting date: %w", err) } - updatedScene.Rating = translator.ratingConversionOptional(input.Rating, input.Rating100) updatedScene.StudioID, err = translator.optionalIntFromString(input.StudioID, "studio_id") if err != nil { return nil, fmt.Errorf("converting studio id: %w", err) } - updatedScene.Organized = translator.optionalBool(input.Organized, "organized") + updatedScene.URLs = 
translator.optionalURLsBulk(input.Urls, input.URL) - if translator.hasField("urls") { - updatedScene.URLs = &models.UpdateStrings{ - Values: input.Urls.Values, - Mode: input.Urls.Mode, - } - } else if translator.hasField("url") { - updatedScene.URLs = &models.UpdateStrings{ - Values: []string{*input.URL}, - Mode: models.RelationshipUpdateModeSet, - } - } - - if translator.hasField("performer_ids") { - updatedScene.PerformerIDs, err = translateUpdateIDs(input.PerformerIds.Ids, input.PerformerIds.Mode) - if err != nil { - return nil, fmt.Errorf("converting performer ids: %w", err) - } + updatedScene.PerformerIDs, err = translator.updateIdsBulk(input.PerformerIds, "performer_ids") + if err != nil { + return nil, fmt.Errorf("converting performer ids: %w", err) } - - if translator.hasField("tag_ids") { - updatedScene.TagIDs, err = translateUpdateIDs(input.TagIds.Ids, input.TagIds.Mode) - if err != nil { - return nil, fmt.Errorf("converting tag ids: %w", err) - } + updatedScene.TagIDs, err = translator.updateIdsBulk(input.TagIds, "tag_ids") + if err != nil { + return nil, fmt.Errorf("converting tag ids: %w", err) } - - if translator.hasField("gallery_ids") { - updatedScene.GalleryIDs, err = translateUpdateIDs(input.GalleryIds.Ids, input.GalleryIds.Mode) - if err != nil { - return nil, fmt.Errorf("converting gallery ids: %w", err) - } + updatedScene.GalleryIDs, err = translator.updateIdsBulk(input.GalleryIds, "gallery_ids") + if err != nil { + return nil, fmt.Errorf("converting gallery ids: %w", err) } - // Save the movies - if translator.hasField("movie_ids") { - updatedScene.MovieIDs, err = translateSceneMovieIDs(*input.MovieIds) - if err != nil { - return nil, fmt.Errorf("converting movie ids: %w", err) - } + updatedScene.MovieIDs, err = translator.updateMovieIDsBulk(input.MovieIds, "movie_ids") + if err != nil { + return nil, fmt.Errorf("converting movie ids: %w", err) } var coverImageData []byte @@ -468,7 +405,7 @@ func (r *mutationResolver) BulkSceneUpdate(ctx 
context.Context, input BulkSceneU func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneDestroyInput) (bool, error) { sceneID, err := strconv.Atoi(input.ID) if err != nil { - return false, err + return false, fmt.Errorf("converting id: %w", err) } fileNamingAlgo := manager.GetInstance().Config.GetVideoFileNamingAlgorithm() @@ -519,6 +456,11 @@ func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneD } func (r *mutationResolver) ScenesDestroy(ctx context.Context, input models.ScenesDestroyInput) (bool, error) { + sceneIDs, err := stringslice.StringSliceToIntSlice(input.Ids) + if err != nil { + return false, fmt.Errorf("converting ids: %w", err) + } + var scenes []*models.Scene fileNamingAlgo := manager.GetInstance().Config.GetVideoFileNamingAlgorithm() @@ -534,23 +476,21 @@ func (r *mutationResolver) ScenesDestroy(ctx context.Context, input models.Scene if err := r.withTxn(ctx, func(ctx context.Context) error { qb := r.repository.Scene - for _, id := range input.Ids { - sceneID, _ := strconv.Atoi(id) - - s, err := qb.Find(ctx, sceneID) + for _, id := range sceneIDs { + scene, err := qb.Find(ctx, id) if err != nil { return err } - if s == nil { - return fmt.Errorf("scene with id %d not found", sceneID) + if scene == nil { + return fmt.Errorf("scene with id %d not found", id) } - scenes = append(scenes, s) + scenes = append(scenes, scene) // kill any running encoders - manager.KillRunningStreams(s, fileNamingAlgo) + manager.KillRunningStreams(scene, fileNamingAlgo) - if err := r.sceneService.Destroy(ctx, s, fileDeleter, deleteGenerated, deleteFile); err != nil { + if err := r.sceneService.Destroy(ctx, scene, fileDeleter, deleteGenerated, deleteFile); err != nil { return err } } @@ -580,18 +520,16 @@ func (r *mutationResolver) ScenesDestroy(ctx context.Context, input models.Scene func (r *mutationResolver) SceneAssignFile(ctx context.Context, input AssignSceneFileInput) (bool, error) { sceneID, err := 
strconv.Atoi(input.SceneID) if err != nil { - return false, fmt.Errorf("converting scene ID: %w", err) + return false, fmt.Errorf("converting scene id: %w", err) } - fileIDInt, err := strconv.Atoi(input.FileID) + fileID, err := strconv.Atoi(input.FileID) if err != nil { - return false, fmt.Errorf("converting file ID: %w", err) + return false, fmt.Errorf("converting file id: %w", err) } - fileID := file.ID(fileIDInt) - if err := r.withTxn(ctx, func(ctx context.Context) error { - return r.Resolver.sceneService.AssignFile(ctx, sceneID, fileID) + return r.Resolver.sceneService.AssignFile(ctx, sceneID, models.FileID(fileID)) }); err != nil { return false, fmt.Errorf("assigning file to scene: %w", err) } @@ -602,15 +540,17 @@ func (r *mutationResolver) SceneAssignFile(ctx context.Context, input AssignScen func (r *mutationResolver) SceneMerge(ctx context.Context, input SceneMergeInput) (*models.Scene, error) { srcIDs, err := stringslice.StringSliceToIntSlice(input.Source) if err != nil { - return nil, fmt.Errorf("converting source IDs: %w", err) + return nil, fmt.Errorf("converting source ids: %w", err) } destID, err := strconv.Atoi(input.Destination) if err != nil { - return nil, fmt.Errorf("converting destination ID %s: %w", input.Destination, err) + return nil, fmt.Errorf("converting destination id: %w", err) } var values *models.ScenePartial + var coverImageData []byte + if input.Values != nil { translator := changesetTranslator{ inputMap: getNamedUpdateInputMap(ctx, "input.values"), @@ -620,20 +560,19 @@ func (r *mutationResolver) SceneMerge(ctx context.Context, input SceneMergeInput if err != nil { return nil, err } + + if input.Values.CoverImage != nil { + var err error + coverImageData, err = utils.ProcessImageInput(ctx, *input.Values.CoverImage) + if err != nil { + return nil, fmt.Errorf("processing cover image: %w", err) + } + } } else { v := models.NewScenePartial() values = &v } - var coverImageData []byte - if input.Values.CoverImage != nil { - var err error 
- coverImageData, err = utils.ProcessImageInput(ctx, *input.Values.CoverImage) - if err != nil { - return nil, err - } - } - var ret *models.Scene if err := r.withTxn(ctx, func(ctx context.Context) error { if err := r.Resolver.sceneService.Merge(ctx, srcIDs, destID, *values); err != nil { @@ -678,15 +617,13 @@ func (r *mutationResolver) SceneMarkerCreate(ctx context.Context, input SceneMar return nil, fmt.Errorf("converting primary tag id: %w", err) } - currentTime := time.Now() - newMarker := models.SceneMarker{ - Title: input.Title, - Seconds: input.Seconds, - PrimaryTagID: primaryTagID, - SceneID: sceneID, - CreatedAt: currentTime, - UpdatedAt: currentTime, - } + // Populate a new scene marker from the input + newMarker := models.NewSceneMarker() + + newMarker.Title = input.Title + newMarker.Seconds = input.Seconds + newMarker.PrimaryTagID = primaryTagID + newMarker.SceneID = sceneID tagIDs, err := stringslice.StringSliceToIntSlice(input.TagIds) if err != nil { @@ -716,7 +653,7 @@ func (r *mutationResolver) SceneMarkerCreate(ctx context.Context, input SceneMar func (r *mutationResolver) SceneMarkerUpdate(ctx context.Context, input SceneMarkerUpdateInput) (*models.SceneMarker, error) { markerID, err := strconv.Atoi(input.ID) if err != nil { - return nil, err + return nil, fmt.Errorf("converting id: %w", err) } translator := changesetTranslator{ @@ -814,7 +751,7 @@ func (r *mutationResolver) SceneMarkerUpdate(ctx context.Context, input SceneMar func (r *mutationResolver) SceneMarkerDestroy(ctx context.Context, id string) (bool, error) { markerID, err := strconv.Atoi(id) if err != nil { - return false, err + return false, fmt.Errorf("converting id: %w", err) } fileNamingAlgo := manager.GetInstance().Config.GetVideoFileNamingAlgorithm() @@ -865,7 +802,7 @@ func (r *mutationResolver) SceneMarkerDestroy(ctx context.Context, id string) (b func (r *mutationResolver) SceneSaveActivity(ctx context.Context, id string, resumeTime *float64, playDuration *float64) (ret bool, 
err error) { sceneID, err := strconv.Atoi(id) if err != nil { - return false, err + return false, fmt.Errorf("converting id: %w", err) } if err := r.withTxn(ctx, func(ctx context.Context) error { @@ -883,7 +820,7 @@ func (r *mutationResolver) SceneSaveActivity(ctx context.Context, id string, res func (r *mutationResolver) SceneIncrementPlayCount(ctx context.Context, id string) (ret int, err error) { sceneID, err := strconv.Atoi(id) if err != nil { - return 0, err + return 0, fmt.Errorf("converting id: %w", err) } if err := r.withTxn(ctx, func(ctx context.Context) error { @@ -901,7 +838,7 @@ func (r *mutationResolver) SceneIncrementPlayCount(ctx context.Context, id strin func (r *mutationResolver) SceneIncrementO(ctx context.Context, id string) (ret int, err error) { sceneID, err := strconv.Atoi(id) if err != nil { - return 0, err + return 0, fmt.Errorf("converting id: %w", err) } if err := r.withTxn(ctx, func(ctx context.Context) error { @@ -919,7 +856,7 @@ func (r *mutationResolver) SceneIncrementO(ctx context.Context, id string) (ret func (r *mutationResolver) SceneDecrementO(ctx context.Context, id string) (ret int, err error) { sceneID, err := strconv.Atoi(id) if err != nil { - return 0, err + return 0, fmt.Errorf("converting id: %w", err) } if err := r.withTxn(ctx, func(ctx context.Context) error { @@ -937,7 +874,7 @@ func (r *mutationResolver) SceneDecrementO(ctx context.Context, id string) (ret func (r *mutationResolver) SceneResetO(ctx context.Context, id string) (ret int, err error) { sceneID, err := strconv.Atoi(id) if err != nil { - return 0, err + return 0, fmt.Errorf("converting id: %w", err) } if err := r.withTxn(ctx, func(ctx context.Context) error { diff --git a/internal/api/resolver_mutation_stash_box.go b/internal/api/resolver_mutation_stash_box.go index cbcfc53401b..2f8593097f9 100644 --- a/internal/api/resolver_mutation_stash_box.go +++ b/internal/api/resolver_mutation_stash_box.go @@ -53,7 +53,7 @@ func (r *mutationResolver) 
SubmitStashBoxSceneDraft(ctx context.Context, input S id, err := strconv.Atoi(input.ID) if err != nil { - return nil, err + return nil, fmt.Errorf("converting id: %w", err) } var res *string @@ -95,7 +95,7 @@ func (r *mutationResolver) SubmitStashBoxPerformerDraft(ctx context.Context, inp id, err := strconv.Atoi(input.ID) if err != nil { - return nil, err + return nil, fmt.Errorf("converting id: %w", err) } var res *string diff --git a/internal/api/resolver_mutation_studio.go b/internal/api/resolver_mutation_studio.go index 626e0d4f481..db314d26109 100644 --- a/internal/api/resolver_mutation_studio.go +++ b/internal/api/resolver_mutation_studio.go @@ -4,7 +4,6 @@ import ( "context" "fmt" "strconv" - "time" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/plugin" @@ -13,19 +12,48 @@ import ( "github.com/stashapp/stash/pkg/utils" ) -func (r *mutationResolver) StudioCreate(ctx context.Context, input StudioCreateInput) (*models.Studio, error) { - s, err := studioFromStudioCreateInput(ctx, input) - if err != nil { +// used to refetch studio after hooks run +func (r *mutationResolver) getStudio(ctx context.Context, id int) (ret *models.Studio, err error) { + if err := r.withTxn(ctx, func(ctx context.Context) error { + ret, err = r.repository.Studio.Find(ctx, id) + return err + }); err != nil { return nil, err } + return ret, nil +} + +func (r *mutationResolver) StudioCreate(ctx context.Context, input models.StudioCreateInput) (*models.Studio, error) { + translator := changesetTranslator{ + inputMap: getUpdateInputMap(ctx), + } + + // Populate a new studio from the input + newStudio := models.NewStudio() + + newStudio.Name = input.Name + newStudio.URL = translator.string(input.URL) + newStudio.Rating = translator.ratingConversion(input.Rating, input.Rating100) + newStudio.Details = translator.string(input.Details) + newStudio.IgnoreAutoTag = translator.bool(input.IgnoreAutoTag) + newStudio.Aliases = models.NewRelatedStrings(input.Aliases) + 
newStudio.StashIDs = models.NewRelatedStashIDs(input.StashIds) + + var err error + + newStudio.ParentID, err = translator.intPtrFromString(input.ParentID) + if err != nil { + return nil, fmt.Errorf("converting parent id: %w", err) + } + // Process the base 64 encoded image string var imageData []byte if input.Image != nil { var err error imageData, err = utils.ProcessImageInput(ctx, *input.Image) if err != nil { - return nil, err + return nil, fmt.Errorf("processing image: %w", err) } } @@ -33,19 +61,19 @@ func (r *mutationResolver) StudioCreate(ctx context.Context, input StudioCreateI if err := r.withTxn(ctx, func(ctx context.Context) error { qb := r.repository.Studio - if s.Aliases.Loaded() && len(s.Aliases.List()) > 0 { - if err := studio.EnsureAliasesUnique(ctx, 0, s.Aliases.List(), qb); err != nil { + if len(input.Aliases) > 0 { + if err := studio.EnsureAliasesUnique(ctx, 0, input.Aliases, qb); err != nil { return err } } - err = qb.Create(ctx, s) + err = qb.Create(ctx, &newStudio) if err != nil { return err } if len(imageData) > 0 { - if err := qb.UpdateImage(ctx, s.ID, imageData); err != nil { + if err := qb.UpdateImage(ctx, newStudio.ID, imageData); err != nil { return err } } @@ -55,53 +83,37 @@ func (r *mutationResolver) StudioCreate(ctx context.Context, input StudioCreateI return nil, err } - r.hookExecutor.ExecutePostHooks(ctx, s.ID, plugin.StudioCreatePost, input, nil) - - return s, nil + r.hookExecutor.ExecutePostHooks(ctx, newStudio.ID, plugin.StudioCreatePost, input, nil) + return r.getStudio(ctx, newStudio.ID) } -func studioFromStudioCreateInput(ctx context.Context, input StudioCreateInput) (*models.Studio, error) { - translator := changesetTranslator{ - inputMap: getUpdateInputMap(ctx), +func (r *mutationResolver) StudioUpdate(ctx context.Context, input models.StudioUpdateInput) (*models.Studio, error) { + studioID, err := strconv.Atoi(input.ID) + if err != nil { + return nil, fmt.Errorf("converting id: %w", err) } - // Populate a new studio from 
the input - currentTime := time.Now() - newStudio := models.Studio{ - Name: input.Name, - CreatedAt: currentTime, - UpdatedAt: currentTime, - URL: translator.string(input.URL, "url"), - Rating: translator.ratingConversionInt(input.Rating, input.Rating100), - Details: translator.string(input.Details, "details"), - IgnoreAutoTag: translator.bool(input.IgnoreAutoTag, "ignore_auto_tag"), + translator := changesetTranslator{ + inputMap: getUpdateInputMap(ctx), } - var err error - newStudio.ParentID, err = translator.intPtrFromString(input.ParentID, "parent_id") + // Populate studio from the input + updatedStudio := models.NewStudioPartial() + + updatedStudio.ID = studioID + updatedStudio.Name = translator.optionalString(input.Name, "name") + updatedStudio.URL = translator.optionalString(input.URL, "url") + updatedStudio.Details = translator.optionalString(input.Details, "details") + updatedStudio.Rating = translator.optionalRatingConversion(input.Rating, input.Rating100) + updatedStudio.IgnoreAutoTag = translator.optionalBool(input.IgnoreAutoTag, "ignore_auto_tag") + updatedStudio.Aliases = translator.updateStrings(input.Aliases, "aliases") + updatedStudio.StashIDs = translator.updateStashIDs(input.StashIds, "stash_ids") + + updatedStudio.ParentID, err = translator.optionalIntFromString(input.ParentID, "parent_id") if err != nil { return nil, fmt.Errorf("converting parent id: %w", err) } - if input.Aliases != nil { - newStudio.Aliases = models.NewRelatedStrings(input.Aliases) - } - if input.StashIds != nil { - newStudio.StashIDs = models.NewRelatedStashIDs(stashIDPtrSliceToSlice(input.StashIds)) - } - - return &newStudio, nil -} - -func (r *mutationResolver) StudioUpdate(ctx context.Context, input StudioUpdateInput) (*models.Studio, error) { - var updatedStudio *models.Studio - var err error - - translator := changesetTranslator{ - inputMap: getNamedUpdateInputMap(ctx, updateInputField), - } - s := studioPartialFromStudioUpdateInput(input, &input.ID, translator) - // 
Process the base 64 encoded image string var imageData []byte imageIncluded := translator.hasField("image") @@ -109,7 +121,7 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input StudioUpdateI var err error imageData, err = utils.ProcessImageInput(ctx, *input.Image) if err != nil { - return nil, err + return nil, fmt.Errorf("processing image: %w", err) } } @@ -117,17 +129,17 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input StudioUpdateI if err := r.withTxn(ctx, func(ctx context.Context) error { qb := r.repository.Studio - if err := studio.ValidateModify(ctx, *s, qb); err != nil { + if err := studio.ValidateModify(ctx, updatedStudio, qb); err != nil { return err } - updatedStudio, err = qb.UpdatePartial(ctx, *s) + _, err = qb.UpdatePartial(ctx, updatedStudio) if err != nil { return err } if imageIncluded { - if err := qb.UpdateImage(ctx, s.ID, imageData); err != nil { + if err := qb.UpdateImage(ctx, studioID, imageData); err != nil { return err } } @@ -137,57 +149,14 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input StudioUpdateI return nil, err } - r.hookExecutor.ExecutePostHooks(ctx, updatedStudio.ID, plugin.StudioUpdatePost, input, translator.getFields()) - - return updatedStudio, nil -} - -// This is slightly different to studioPartialFromStudioCreateInput in that Name is handled differently -// and ImageIncluded is not hardcoded to true -func studioPartialFromStudioUpdateInput(input StudioUpdateInput, id *string, translator changesetTranslator) *models.StudioPartial { - // Populate studio from the input - updatedStudio := models.StudioPartial{ - Name: translator.optionalString(input.Name, "name"), - URL: translator.optionalString(input.URL, "url"), - Details: translator.optionalString(input.Details, "details"), - Rating: translator.ratingConversionOptional(input.Rating, input.Rating100), - IgnoreAutoTag: translator.optionalBool(input.IgnoreAutoTag, "ignore_auto_tag"), - UpdatedAt: 
models.NewOptionalTime(time.Now()), - } - - updatedStudio.ID, _ = strconv.Atoi(*id) - - if input.ParentID != nil { - parentID, _ := strconv.Atoi(*input.ParentID) - if parentID > 0 { - // This is to be set directly as we know it has a value and the translator won't have the field - updatedStudio.ParentID = models.NewOptionalInt(parentID) - } - } else { - updatedStudio.ParentID = translator.optionalInt(nil, "parent_id") - } - - if translator.hasField("aliases") { - updatedStudio.Aliases = &models.UpdateStrings{ - Values: input.Aliases, - Mode: models.RelationshipUpdateModeSet, - } - } - - if translator.hasField("stash_ids") { - updatedStudio.StashIDs = &models.UpdateStashIDs{ - StashIDs: stashIDPtrSliceToSlice(input.StashIds), - Mode: models.RelationshipUpdateModeSet, - } - } - - return &updatedStudio + r.hookExecutor.ExecutePostHooks(ctx, studioID, plugin.StudioUpdatePost, input, translator.getFields()) + return r.getStudio(ctx, studioID) } func (r *mutationResolver) StudioDestroy(ctx context.Context, input StudioDestroyInput) (bool, error) { id, err := strconv.Atoi(input.ID) if err != nil { - return false, err + return false, fmt.Errorf("converting id: %w", err) } if err := r.withTxn(ctx, func(ctx context.Context) error { @@ -204,7 +173,7 @@ func (r *mutationResolver) StudioDestroy(ctx context.Context, input StudioDestro func (r *mutationResolver) StudiosDestroy(ctx context.Context, studioIDs []string) (bool, error) { ids, err := stringslice.StringSliceToIntSlice(studioIDs) if err != nil { - return false, err + return false, fmt.Errorf("converting ids: %w", err) } if err := r.withTxn(ctx, func(ctx context.Context) error { diff --git a/internal/api/resolver_mutation_tag.go b/internal/api/resolver_mutation_tag.go index 51c9fa7ab26..cec4a77726c 100644 --- a/internal/api/resolver_mutation_tag.go +++ b/internal/api/resolver_mutation_tag.go @@ -4,7 +4,6 @@ import ( "context" "fmt" "strconv" - "time" "github.com/stashapp/stash/pkg/logger" 
"github.com/stashapp/stash/pkg/models" @@ -31,14 +30,11 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input TagCreateInput) } // Populate a new tag from the input - currentTime := time.Now() - newTag := models.Tag{ - Name: input.Name, - CreatedAt: currentTime, - UpdatedAt: currentTime, - Description: translator.string(input.Description, "description"), - IgnoreAutoTag: translator.bool(input.IgnoreAutoTag, "ignore_auto_tag"), - } + newTag := models.NewTag() + + newTag.Name = input.Name + newTag.Description = translator.string(input.Description) + newTag.IgnoreAutoTag = translator.bool(input.IgnoreAutoTag) var err error @@ -46,7 +42,7 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input TagCreateInput) if len(input.ParentIds) > 0 { parentIDs, err = stringslice.StringSliceToIntSlice(input.ParentIds) if err != nil { - return nil, err + return nil, fmt.Errorf("converting parent ids: %w", err) } } @@ -54,7 +50,7 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input TagCreateInput) if len(input.ChildIds) > 0 { childIDs, err = stringslice.StringSliceToIntSlice(input.ChildIds) if err != nil { - return nil, err + return nil, fmt.Errorf("converting child ids: %w", err) } } @@ -63,7 +59,7 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input TagCreateInput) if input.Image != nil { imageData, err = utils.ProcessImageInput(ctx, *input.Image) if err != nil { - return nil, err + return nil, fmt.Errorf("processing image: %w", err) } } @@ -130,7 +126,7 @@ func (r *mutationResolver) TagCreate(ctx context.Context, input TagCreateInput) func (r *mutationResolver) TagUpdate(ctx context.Context, input TagUpdateInput) (*models.Tag, error) { tagID, err := strconv.Atoi(input.ID) if err != nil { - return nil, err + return nil, fmt.Errorf("converting id: %w", err) } translator := changesetTranslator{ @@ -147,7 +143,7 @@ func (r *mutationResolver) TagUpdate(ctx context.Context, input TagUpdateInput) if translator.hasField("parent_ids") { 
parentIDs, err = stringslice.StringSliceToIntSlice(input.ParentIds) if err != nil { - return nil, err + return nil, fmt.Errorf("converting parent ids: %w", err) } } @@ -155,7 +151,7 @@ func (r *mutationResolver) TagUpdate(ctx context.Context, input TagUpdateInput) if translator.hasField("child_ids") { childIDs, err = stringslice.StringSliceToIntSlice(input.ChildIds) if err != nil { - return nil, err + return nil, fmt.Errorf("converting child ids: %w", err) } } @@ -164,7 +160,7 @@ func (r *mutationResolver) TagUpdate(ctx context.Context, input TagUpdateInput) if input.Image != nil { imageData, err = utils.ProcessImageInput(ctx, *input.Image) if err != nil { - return nil, err + return nil, fmt.Errorf("processing image: %w", err) } } @@ -246,7 +242,7 @@ func (r *mutationResolver) TagUpdate(ctx context.Context, input TagUpdateInput) func (r *mutationResolver) TagDestroy(ctx context.Context, input TagDestroyInput) (bool, error) { tagID, err := strconv.Atoi(input.ID) if err != nil { - return false, err + return false, fmt.Errorf("converting id: %w", err) } if err := r.withTxn(ctx, func(ctx context.Context) error { @@ -263,7 +259,7 @@ func (r *mutationResolver) TagDestroy(ctx context.Context, input TagDestroyInput func (r *mutationResolver) TagsDestroy(ctx context.Context, tagIDs []string) (bool, error) { ids, err := stringslice.StringSliceToIntSlice(tagIDs) if err != nil { - return false, err + return false, fmt.Errorf("converting ids: %w", err) } if err := r.withTxn(ctx, func(ctx context.Context) error { @@ -289,12 +285,12 @@ func (r *mutationResolver) TagsDestroy(ctx context.Context, tagIDs []string) (bo func (r *mutationResolver) TagsMerge(ctx context.Context, input TagsMergeInput) (*models.Tag, error) { source, err := stringslice.StringSliceToIntSlice(input.Source) if err != nil { - return nil, err + return nil, fmt.Errorf("converting source ids: %w", err) } destination, err := strconv.Atoi(input.Destination) if err != nil { - return nil, err + return nil, 
fmt.Errorf("converting destination id: %w", err) } if len(source) == 0 { @@ -345,5 +341,6 @@ func (r *mutationResolver) TagsMerge(ctx context.Context, input TagsMergeInput) } r.hookExecutor.ExecutePostHooks(ctx, t.ID, plugin.TagMergePost, input, nil) + return t, nil } diff --git a/internal/api/resolver_query_find_performer.go b/internal/api/resolver_query_find_performer.go index 437ac8fcf04..a47b7a18dc5 100644 --- a/internal/api/resolver_query_find_performer.go +++ b/internal/api/resolver_query_find_performer.go @@ -23,9 +23,19 @@ func (r *queryResolver) FindPerformer(ctx context.Context, id string) (ret *mode return ret, nil } -func (r *queryResolver) FindPerformers(ctx context.Context, performerFilter *models.PerformerFilterType, filter *models.FindFilterType) (ret *FindPerformersResultType, err error) { +func (r *queryResolver) FindPerformers(ctx context.Context, performerFilter *models.PerformerFilterType, filter *models.FindFilterType, performerIDs []int) (ret *FindPerformersResultType, err error) { if err := r.withReadTxn(ctx, func(ctx context.Context) error { - performers, total, err := r.repository.Performer.Query(ctx, performerFilter, filter) + var performers []*models.Performer + var err error + var total int + + if len(performerIDs) > 0 { + performers, err = r.repository.Performer.FindMany(ctx, performerIDs) + total = len(performers) + } else { + performers, total, err = r.repository.Performer.Query(ctx, performerFilter, filter) + } + if err != nil { return err } @@ -34,6 +44,7 @@ func (r *queryResolver) FindPerformers(ctx context.Context, performerFilter *mod Count: total, Performers: performers, } + return nil }); err != nil { return nil, err diff --git a/internal/api/resolver_query_scraper.go b/internal/api/resolver_query_scraper.go index 7b7694341ba..0220316b2fb 100644 --- a/internal/api/resolver_query_scraper.go +++ b/internal/api/resolver_query_scraper.go @@ -129,7 +129,9 @@ func (r *queryResolver) ScrapeScene(ctx context.Context, scraperID string, 
scene return nil, err } - filterSceneTags([]*scraper.ScrapedScene{ret}) + if ret != nil { + filterSceneTags([]*scraper.ScrapedScene{ret}) + } return ret, nil } @@ -190,7 +192,9 @@ func (r *queryResolver) ScrapeSceneURL(ctx context.Context, url string) (*scrape return nil, err } - filterSceneTags([]*scraper.ScrapedScene{ret}) + if ret != nil { + filterSceneTags([]*scraper.ScrapedScene{ret}) + } return ret, nil } diff --git a/internal/api/routes_image.go b/internal/api/routes_image.go index 4ea612d3b73..4cc2576718c 100644 --- a/internal/api/routes_image.go +++ b/internal/api/routes_image.go @@ -22,14 +22,14 @@ import ( ) type ImageFinder interface { - Find(ctx context.Context, id int) (*models.Image, error) + models.ImageGetter FindByChecksum(ctx context.Context, checksum string) ([]*models.Image, error) } type imageRoutes struct { txnManager txn.Manager imageFinder ImageFinder - fileFinder file.Finder + fileGetter models.FileGetter } func (rs imageRoutes) Routes() chi.Router { @@ -168,7 +168,7 @@ func (rs imageRoutes) ImageCtx(next http.Handler) http.Handler { } if image != nil { - if err := image.LoadPrimaryFile(ctx, rs.fileFinder); err != nil { + if err := image.LoadPrimaryFile(ctx, rs.fileGetter); err != nil { if !errors.Is(err, context.Canceled) { logger.Errorf("error loading primary file for image %d: %v", imageID, err) } diff --git a/internal/api/routes_movie.go b/internal/api/routes_movie.go index a64aae76cf8..400587763b5 100644 --- a/internal/api/routes_movie.go +++ b/internal/api/routes_movie.go @@ -14,9 +14,9 @@ import ( ) type MovieFinder interface { + models.MovieGetter GetFrontImage(ctx context.Context, movieID int) ([]byte, error) GetBackImage(ctx context.Context, movieID int) ([]byte, error) - Find(ctx context.Context, id int) (*models.Movie, error) } type movieRoutes struct { diff --git a/internal/api/routes_performer.go b/internal/api/routes_performer.go index e7631de5b84..d05e5309570 100644 --- a/internal/api/routes_performer.go +++ 
b/internal/api/routes_performer.go @@ -15,7 +15,7 @@ import ( ) type PerformerFinder interface { - Find(ctx context.Context, id int) (*models.Performer, error) + models.PerformerGetter GetImage(ctx context.Context, performerID int) ([]byte, error) } diff --git a/internal/api/routes_scene.go b/internal/api/routes_scene.go index 43d37da36e0..e0584d6888b 100644 --- a/internal/api/routes_scene.go +++ b/internal/api/routes_scene.go @@ -12,40 +12,43 @@ import ( "github.com/stashapp/stash/internal/manager" "github.com/stashapp/stash/internal/manager/config" "github.com/stashapp/stash/pkg/ffmpeg" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/file/video" "github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/scene" "github.com/stashapp/stash/pkg/txn" "github.com/stashapp/stash/pkg/utils" ) type SceneFinder interface { - manager.SceneCoverGetter + models.SceneGetter - scene.IDFinder FindByChecksum(ctx context.Context, checksum string) ([]*models.Scene, error) FindByOSHash(ctx context.Context, oshash string) ([]*models.Scene, error) + GetCover(ctx context.Context, sceneID int) ([]byte, error) } type SceneMarkerFinder interface { - Find(ctx context.Context, id int) (*models.SceneMarker, error) + models.SceneMarkerGetter FindBySceneID(ctx context.Context, sceneID int) ([]*models.SceneMarker, error) } +type SceneMarkerTagFinder interface { + models.TagGetter + FindBySceneMarkerID(ctx context.Context, sceneMarkerID int) ([]*models.Tag, error) +} + type CaptionFinder interface { - GetCaptions(ctx context.Context, fileID file.ID) ([]*models.VideoCaption, error) + GetCaptions(ctx context.Context, fileID models.FileID) ([]*models.VideoCaption, error) } type sceneRoutes struct { txnManager txn.Manager sceneFinder SceneFinder - fileFinder file.Finder + fileGetter models.FileGetter captionFinder CaptionFinder sceneMarkerFinder SceneMarkerFinder - tagFinder 
scene.MarkerTagFinder + tagFinder SceneMarkerTagFinder } func (rs sceneRoutes) Routes() chi.Router { @@ -574,7 +577,7 @@ func (rs sceneRoutes) SceneCtx(next http.Handler) http.Handler { scene, _ = qb.Find(ctx, sceneID) if scene != nil { - if err := scene.LoadPrimaryFile(ctx, rs.fileFinder); err != nil { + if err := scene.LoadPrimaryFile(ctx, rs.fileGetter); err != nil { if !errors.Is(err, context.Canceled) { logger.Errorf("error loading primary file for scene %d: %v", sceneID, err) } diff --git a/internal/api/routes_studio.go b/internal/api/routes_studio.go index ca4e580f6a7..1cce3938532 100644 --- a/internal/api/routes_studio.go +++ b/internal/api/routes_studio.go @@ -11,13 +11,12 @@ import ( "github.com/stashapp/stash/internal/static" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/studio" "github.com/stashapp/stash/pkg/txn" "github.com/stashapp/stash/pkg/utils" ) type StudioFinder interface { - studio.Finder + models.StudioGetter GetImage(ctx context.Context, studioID int) ([]byte, error) } diff --git a/internal/api/routes_tag.go b/internal/api/routes_tag.go index d8837da80c9..9ccf11a11c9 100644 --- a/internal/api/routes_tag.go +++ b/internal/api/routes_tag.go @@ -11,13 +11,12 @@ import ( "github.com/stashapp/stash/internal/static" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/tag" "github.com/stashapp/stash/pkg/txn" "github.com/stashapp/stash/pkg/utils" ) type TagFinder interface { - tag.Finder + models.TagGetter GetImage(ctx context.Context, tagID int) ([]byte, error) } diff --git a/internal/api/server.go b/internal/api/server.go index 6eec5b524e1..b909914cdfd 100644 --- a/internal/api/server.go +++ b/internal/api/server.go @@ -151,7 +151,7 @@ func Start() error { r.Mount("/scene", sceneRoutes{ txnManager: txnManager, sceneFinder: txnManager.Scene, - fileFinder: txnManager.File, + fileGetter: txnManager.File, captionFinder: 
txnManager.File, sceneMarkerFinder: txnManager.SceneMarker, tagFinder: txnManager.Tag, @@ -159,7 +159,7 @@ func Start() error { r.Mount("/image", imageRoutes{ txnManager: txnManager, imageFinder: txnManager.Image, - fileFinder: txnManager.File, + fileGetter: txnManager.File, }.Routes()) r.Mount("/studio", studioRoutes{ txnManager: txnManager, diff --git a/internal/api/types.go b/internal/api/types.go index 13d86f975c7..372c094b8b4 100644 --- a/internal/api/types.go +++ b/internal/api/types.go @@ -1,11 +1,9 @@ package api import ( - "fmt" "math" "github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/sliceutil/stringslice" ) // #1572 - Inf and NaN values cause the JSON marshaller to fail @@ -18,40 +16,12 @@ func handleFloat64(v float64) *float64 { return &v } -func handleFloat64Value(v float64) float64 { - if math.IsInf(v, 0) || math.IsNaN(v) { - return 0 - } - - return v -} - -func translateUpdateIDs(strIDs []string, mode models.RelationshipUpdateMode) (*models.UpdateIDs, error) { - ids, err := stringslice.StringSliceToIntSlice(strIDs) - if err != nil { - return nil, fmt.Errorf("converting ids [%v]: %w", strIDs, err) - } - return &models.UpdateIDs{ - IDs: ids, - Mode: mode, - }, nil -} - -func translateSceneMovieIDs(input BulkUpdateIds) (*models.UpdateMovieIDs, error) { - ids, err := stringslice.StringSliceToIntSlice(input.Ids) - if err != nil { - return nil, fmt.Errorf("converting ids [%v]: %w", input.Ids, err) - } - - ret := &models.UpdateMovieIDs{ - Mode: input.Mode, - } - - for _, id := range ids { - ret.Movies = append(ret.Movies, models.MoviesScenes{ - MovieID: id, - }) +func stashIDsSliceToPtrSlice(v []models.StashID) []*models.StashID { + ret := make([]*models.StashID, len(v)) + for i, vv := range v { + c := vv + ret[i] = &c } - return ret, nil + return ret } diff --git a/internal/autotag/gallery.go b/internal/autotag/gallery.go index d2a8c2c5d29..f768a31dd11 100644 --- a/internal/autotag/gallery.go +++ b/internal/autotag/gallery.go @@ -9,14 
+9,19 @@ import ( "github.com/stashapp/stash/pkg/sliceutil/intslice" ) +type GalleryFinderUpdater interface { + models.GalleryQueryer + models.GalleryUpdater +} + type GalleryPerformerUpdater interface { models.PerformerIDLoader - gallery.PartialUpdater + models.GalleryUpdater } type GalleryTagUpdater interface { models.TagIDLoader - gallery.PartialUpdater + models.GalleryUpdater } func getGalleryFileTagger(s *models.Gallery, cache *match.Cache) tagger { @@ -39,7 +44,7 @@ func getGalleryFileTagger(s *models.Gallery, cache *match.Cache) tagger { } // GalleryPerformers tags the provided gallery with performers whose name matches the gallery's path. -func GalleryPerformers(ctx context.Context, s *models.Gallery, rw GalleryPerformerUpdater, performerReader match.PerformerAutoTagQueryer, cache *match.Cache) error { +func GalleryPerformers(ctx context.Context, s *models.Gallery, rw GalleryPerformerUpdater, performerReader models.PerformerAutoTagQueryer, cache *match.Cache) error { t := getGalleryFileTagger(s, cache) return t.tagPerformers(ctx, performerReader, func(subjectID, otherID int) (bool, error) { @@ -63,7 +68,7 @@ func GalleryPerformers(ctx context.Context, s *models.Gallery, rw GalleryPerform // GalleryStudios tags the provided gallery with the first studio whose name matches the gallery's path. // // Gallerys will not be tagged if studio is already set. -func GalleryStudios(ctx context.Context, s *models.Gallery, rw GalleryFinderUpdater, studioReader match.StudioAutoTagQueryer, cache *match.Cache) error { +func GalleryStudios(ctx context.Context, s *models.Gallery, rw GalleryFinderUpdater, studioReader models.StudioAutoTagQueryer, cache *match.Cache) error { if s.StudioID != nil { // don't modify return nil @@ -77,7 +82,7 @@ func GalleryStudios(ctx context.Context, s *models.Gallery, rw GalleryFinderUpda } // GalleryTags tags the provided gallery with tags whose name matches the gallery's path. 
-func GalleryTags(ctx context.Context, s *models.Gallery, rw GalleryTagUpdater, tagReader match.TagAutoTagQueryer, cache *match.Cache) error { +func GalleryTags(ctx context.Context, s *models.Gallery, rw GalleryTagUpdater, tagReader models.TagAutoTagQueryer, cache *match.Cache) error { t := getGalleryFileTagger(s, cache) return t.tagTags(ctx, tagReader, func(subjectID, otherID int) (bool, error) { diff --git a/internal/autotag/gallery_test.go b/internal/autotag/gallery_test.go index b617791abea..23c3d931ee6 100644 --- a/internal/autotag/gallery_test.go +++ b/internal/autotag/gallery_test.go @@ -14,6 +14,19 @@ const galleryExt = "zip" var testCtx = context.Background() +// returns got == expected +// ignores expected.UpdatedAt, but ensures that got.UpdatedAt is set and not null +func galleryPartialsEqual(got, expected models.GalleryPartial) bool { + // updated at should be set and not null + if !got.UpdatedAt.Set || got.UpdatedAt.Null { + return false + } + // else ignore the exact value + got.UpdatedAt = models.OptionalTime{} + + return assert.ObjectsAreEqual(got, expected) +} + func TestGalleryPerformers(t *testing.T) { t.Parallel() @@ -46,12 +59,17 @@ func TestGalleryPerformers(t *testing.T) { mockPerformerReader.On("QueryForAutoTag", testCtx, mock.Anything).Return([]*models.Performer{&performer, &reversedPerformer}, nil).Once() if test.Matches { - mockGalleryReader.On("UpdatePartial", testCtx, galleryID, models.GalleryPartial{ - PerformerIDs: &models.UpdateIDs{ - IDs: []int{performerID}, - Mode: models.RelationshipUpdateModeAdd, - }, - }).Return(nil, nil).Once() + matchPartial := mock.MatchedBy(func(got models.GalleryPartial) bool { + expected := models.GalleryPartial{ + PerformerIDs: &models.UpdateIDs{ + IDs: []int{performerID}, + Mode: models.RelationshipUpdateModeAdd, + }, + } + + return galleryPartialsEqual(got, expected) + }) + mockGalleryReader.On("UpdatePartial", testCtx, galleryID, matchPartial).Return(nil, nil).Once() } gallery := models.Gallery{ @@ 
-91,10 +109,14 @@ func TestGalleryStudios(t *testing.T) { doTest := func(mockStudioReader *mocks.StudioReaderWriter, mockGalleryReader *mocks.GalleryReaderWriter, test pathTestTable) { if test.Matches { - expectedStudioID := studioID - mockGalleryReader.On("UpdatePartial", testCtx, galleryID, models.GalleryPartial{ - StudioID: models.NewOptionalInt(expectedStudioID), - }).Return(nil, nil).Once() + matchPartial := mock.MatchedBy(func(got models.GalleryPartial) bool { + expected := models.GalleryPartial{ + StudioID: models.NewOptionalInt(studioID), + } + + return galleryPartialsEqual(got, expected) + }) + mockGalleryReader.On("UpdatePartial", testCtx, galleryID, matchPartial).Return(nil, nil).Once() } gallery := models.Gallery{ @@ -162,12 +184,17 @@ func TestGalleryTags(t *testing.T) { doTest := func(mockTagReader *mocks.TagReaderWriter, mockGalleryReader *mocks.GalleryReaderWriter, test pathTestTable) { if test.Matches { - mockGalleryReader.On("UpdatePartial", testCtx, galleryID, models.GalleryPartial{ - TagIDs: &models.UpdateIDs{ - IDs: []int{tagID}, - Mode: models.RelationshipUpdateModeAdd, - }, - }).Return(nil, nil).Once() + matchPartial := mock.MatchedBy(func(got models.GalleryPartial) bool { + expected := models.GalleryPartial{ + TagIDs: &models.UpdateIDs{ + IDs: []int{tagID}, + Mode: models.RelationshipUpdateModeAdd, + }, + } + + return galleryPartialsEqual(got, expected) + }) + mockGalleryReader.On("UpdatePartial", testCtx, galleryID, matchPartial).Return(nil, nil).Once() } gallery := models.Gallery{ diff --git a/internal/autotag/image.go b/internal/autotag/image.go index 404640786d4..d28960f3caf 100644 --- a/internal/autotag/image.go +++ b/internal/autotag/image.go @@ -9,14 +9,19 @@ import ( "github.com/stashapp/stash/pkg/sliceutil/intslice" ) +type ImageFinderUpdater interface { + models.ImageQueryer + models.ImageUpdater +} + type ImagePerformerUpdater interface { models.PerformerIDLoader - image.PartialUpdater + models.ImageUpdater } type ImageTagUpdater 
interface { models.TagIDLoader - image.PartialUpdater + models.ImageUpdater } func getImageFileTagger(s *models.Image, cache *match.Cache) tagger { @@ -30,7 +35,7 @@ func getImageFileTagger(s *models.Image, cache *match.Cache) tagger { } // ImagePerformers tags the provided image with performers whose name matches the image's path. -func ImagePerformers(ctx context.Context, s *models.Image, rw ImagePerformerUpdater, performerReader match.PerformerAutoTagQueryer, cache *match.Cache) error { +func ImagePerformers(ctx context.Context, s *models.Image, rw ImagePerformerUpdater, performerReader models.PerformerAutoTagQueryer, cache *match.Cache) error { t := getImageFileTagger(s, cache) return t.tagPerformers(ctx, performerReader, func(subjectID, otherID int) (bool, error) { @@ -54,7 +59,7 @@ func ImagePerformers(ctx context.Context, s *models.Image, rw ImagePerformerUpda // ImageStudios tags the provided image with the first studio whose name matches the image's path. // // Images will not be tagged if studio is already set. -func ImageStudios(ctx context.Context, s *models.Image, rw ImageFinderUpdater, studioReader match.StudioAutoTagQueryer, cache *match.Cache) error { +func ImageStudios(ctx context.Context, s *models.Image, rw ImageFinderUpdater, studioReader models.StudioAutoTagQueryer, cache *match.Cache) error { if s.StudioID != nil { // don't modify return nil @@ -68,7 +73,7 @@ func ImageStudios(ctx context.Context, s *models.Image, rw ImageFinderUpdater, s } // ImageTags tags the provided image with tags whose name matches the image's path. 
-func ImageTags(ctx context.Context, s *models.Image, rw ImageTagUpdater, tagReader match.TagAutoTagQueryer, cache *match.Cache) error { +func ImageTags(ctx context.Context, s *models.Image, rw ImageTagUpdater, tagReader models.TagAutoTagQueryer, cache *match.Cache) error { t := getImageFileTagger(s, cache) return t.tagTags(ctx, tagReader, func(subjectID, otherID int) (bool, error) { diff --git a/internal/autotag/image_test.go b/internal/autotag/image_test.go index 3ced047f7e2..06991beea1f 100644 --- a/internal/autotag/image_test.go +++ b/internal/autotag/image_test.go @@ -11,6 +11,19 @@ import ( const imageExt = "jpg" +// returns got == expected +// ignores expected.UpdatedAt, but ensures that got.UpdatedAt is set and not null +func imagePartialsEqual(got, expected models.ImagePartial) bool { + // updated at should be set and not null + if !got.UpdatedAt.Set || got.UpdatedAt.Null { + return false + } + // else ignore the exact value + got.UpdatedAt = models.OptionalTime{} + + return assert.ObjectsAreEqual(got, expected) +} + func TestImagePerformers(t *testing.T) { t.Parallel() @@ -43,12 +56,17 @@ func TestImagePerformers(t *testing.T) { mockPerformerReader.On("QueryForAutoTag", testCtx, mock.Anything).Return([]*models.Performer{&performer, &reversedPerformer}, nil).Once() if test.Matches { - mockImageReader.On("UpdatePartial", testCtx, imageID, models.ImagePartial{ - PerformerIDs: &models.UpdateIDs{ - IDs: []int{performerID}, - Mode: models.RelationshipUpdateModeAdd, - }, - }).Return(nil, nil).Once() + matchPartial := mock.MatchedBy(func(got models.ImagePartial) bool { + expected := models.ImagePartial{ + PerformerIDs: &models.UpdateIDs{ + IDs: []int{performerID}, + Mode: models.RelationshipUpdateModeAdd, + }, + } + + return imagePartialsEqual(got, expected) + }) + mockImageReader.On("UpdatePartial", testCtx, imageID, matchPartial).Return(nil, nil).Once() } image := models.Image{ @@ -88,10 +106,14 @@ func TestImageStudios(t *testing.T) { doTest := 
func(mockStudioReader *mocks.StudioReaderWriter, mockImageReader *mocks.ImageReaderWriter, test pathTestTable) { if test.Matches { - expectedStudioID := studioID - mockImageReader.On("UpdatePartial", testCtx, imageID, models.ImagePartial{ - StudioID: models.NewOptionalInt(expectedStudioID), - }).Return(nil, nil).Once() + matchPartial := mock.MatchedBy(func(got models.ImagePartial) bool { + expected := models.ImagePartial{ + StudioID: models.NewOptionalInt(studioID), + } + + return imagePartialsEqual(got, expected) + }) + mockImageReader.On("UpdatePartial", testCtx, imageID, matchPartial).Return(nil, nil).Once() } image := models.Image{ @@ -159,12 +181,17 @@ func TestImageTags(t *testing.T) { doTest := func(mockTagReader *mocks.TagReaderWriter, mockImageReader *mocks.ImageReaderWriter, test pathTestTable) { if test.Matches { - mockImageReader.On("UpdatePartial", testCtx, imageID, models.ImagePartial{ - TagIDs: &models.UpdateIDs{ - IDs: []int{tagID}, - Mode: models.RelationshipUpdateModeAdd, - }, - }).Return(nil, nil).Once() + matchPartial := mock.MatchedBy(func(got models.ImagePartial) bool { + expected := models.ImagePartial{ + TagIDs: &models.UpdateIDs{ + IDs: []int{tagID}, + Mode: models.RelationshipUpdateModeAdd, + }, + } + + return imagePartialsEqual(got, expected) + }) + mockImageReader.On("UpdatePartial", testCtx, imageID, matchPartial).Return(nil, nil).Once() } image := models.Image{ diff --git a/internal/autotag/integration_test.go b/internal/autotag/integration_test.go index 1c7b0ee2d55..84ae016987c 100644 --- a/internal/autotag/integration_test.go +++ b/internal/autotag/integration_test.go @@ -10,7 +10,6 @@ import ( "path/filepath" "testing" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/sqlite" "github.com/stashapp/stash/pkg/txn" @@ -99,7 +98,7 @@ func createPerformer(ctx context.Context, pqb models.PerformerWriter) error { func createStudio(ctx context.Context, qb models.StudioWriter, name 
string) (*models.Studio, error) { // create the studio studio := models.Studio{ - Name: name, + Name: name, } err := qb.Create(ctx, &studio) @@ -124,12 +123,12 @@ func createTag(ctx context.Context, qb models.TagWriter) error { return nil } -func createScenes(ctx context.Context, sqb models.SceneReaderWriter, folderStore file.FolderStore, fileStore file.Store) error { +func createScenes(ctx context.Context, sqb models.SceneReaderWriter, folderStore models.FolderFinderCreator, fileCreator models.FileCreator) error { // create the scenes scenePatterns, falseScenePatterns := generateTestPaths(testName, sceneExt) for _, fn := range scenePatterns { - f, err := createSceneFile(ctx, fn, folderStore, fileStore) + f, err := createSceneFile(ctx, fn, folderStore, fileCreator) if err != nil { return err } @@ -141,7 +140,7 @@ func createScenes(ctx context.Context, sqb models.SceneReaderWriter, folderStore } for _, fn := range falseScenePatterns { - f, err := createSceneFile(ctx, fn, folderStore, fileStore) + f, err := createSceneFile(ctx, fn, folderStore, fileCreator) if err != nil { return err } @@ -154,7 +153,7 @@ func createScenes(ctx context.Context, sqb models.SceneReaderWriter, folderStore // add organized scenes for _, fn := range scenePatterns { - f, err := createSceneFile(ctx, "organized"+fn, folderStore, fileStore) + f, err := createSceneFile(ctx, "organized"+fn, folderStore, fileCreator) if err != nil { return err } @@ -168,7 +167,7 @@ func createScenes(ctx context.Context, sqb models.SceneReaderWriter, folderStore } // create scene with existing studio io - f, err := createSceneFile(ctx, existingStudioSceneName, folderStore, fileStore) + f, err := createSceneFile(ctx, existingStudioSceneName, folderStore, fileCreator) if err != nil { return err } @@ -196,7 +195,7 @@ func makeScene(expectedResult bool) *models.Scene { return s } -func createSceneFile(ctx context.Context, name string, folderStore file.FolderStore, fileStore file.Store) (*file.VideoFile, error) { +func 
createSceneFile(ctx context.Context, name string, folderStore models.FolderFinderCreator, fileCreator models.FileCreator) (*models.VideoFile, error) { folderPath := filepath.Dir(name) basename := filepath.Base(name) @@ -207,21 +206,21 @@ func createSceneFile(ctx context.Context, name string, folderStore file.FolderSt folderID := folder.ID - f := &file.VideoFile{ - BaseFile: &file.BaseFile{ + f := &models.VideoFile{ + BaseFile: &models.BaseFile{ Basename: basename, ParentFolderID: folderID, }, } - if err := fileStore.Create(ctx, f); err != nil { + if err := fileCreator.Create(ctx, f); err != nil { return nil, fmt.Errorf("creating scene file %q: %w", name, err) } return f, nil } -func getOrCreateFolder(ctx context.Context, folderStore file.FolderStore, folderPath string) (*file.Folder, error) { +func getOrCreateFolder(ctx context.Context, folderStore models.FolderFinderCreator, folderPath string) (*models.Folder, error) { f, err := folderStore.FindByPath(ctx, folderPath) if err != nil { return nil, fmt.Errorf("getting folder by path: %w", err) @@ -231,7 +230,7 @@ func getOrCreateFolder(ctx context.Context, folderStore file.FolderStore, folder return f, nil } - var parentID file.FolderID + var parentID models.FolderID dir := filepath.Dir(folderPath) if dir != "." 
{ parent, err := getOrCreateFolder(ctx, folderStore, dir) @@ -242,7 +241,7 @@ func getOrCreateFolder(ctx context.Context, folderStore file.FolderStore, folder parentID = parent.ID } - f = &file.Folder{ + f = &models.Folder{ Path: folderPath, } @@ -257,8 +256,8 @@ func getOrCreateFolder(ctx context.Context, folderStore file.FolderStore, folder return f, nil } -func createScene(ctx context.Context, sqb models.SceneWriter, s *models.Scene, f *file.VideoFile) error { - err := sqb.Create(ctx, s, []file.ID{f.ID}) +func createScene(ctx context.Context, sqb models.SceneWriter, s *models.Scene, f *models.VideoFile) error { + err := sqb.Create(ctx, s, []models.FileID{f.ID}) if err != nil { return fmt.Errorf("Failed to create scene with path '%s': %s", f.Path, err.Error()) @@ -267,12 +266,12 @@ func createScene(ctx context.Context, sqb models.SceneWriter, s *models.Scene, f return nil } -func createImages(ctx context.Context, w models.ImageReaderWriter, folderStore file.FolderStore, fileStore file.Store) error { +func createImages(ctx context.Context, w models.ImageReaderWriter, folderStore models.FolderFinderCreator, fileCreator models.FileCreator) error { // create the images imagePatterns, falseImagePatterns := generateTestPaths(testName, imageExt) for _, fn := range imagePatterns { - f, err := createImageFile(ctx, fn, folderStore, fileStore) + f, err := createImageFile(ctx, fn, folderStore, fileCreator) if err != nil { return err } @@ -283,7 +282,7 @@ func createImages(ctx context.Context, w models.ImageReaderWriter, folderStore f } } for _, fn := range falseImagePatterns { - f, err := createImageFile(ctx, fn, folderStore, fileStore) + f, err := createImageFile(ctx, fn, folderStore, fileCreator) if err != nil { return err } @@ -296,7 +295,7 @@ func createImages(ctx context.Context, w models.ImageReaderWriter, folderStore f // add organized images for _, fn := range imagePatterns { - f, err := createImageFile(ctx, "organized"+fn, folderStore, fileStore) + f, err := 
createImageFile(ctx, "organized"+fn, folderStore, fileCreator) if err != nil { return err } @@ -310,7 +309,7 @@ func createImages(ctx context.Context, w models.ImageReaderWriter, folderStore f } // create image with existing studio io - f, err := createImageFile(ctx, existingStudioImageName, folderStore, fileStore) + f, err := createImageFile(ctx, existingStudioImageName, folderStore, fileCreator) if err != nil { return err } @@ -326,7 +325,7 @@ func createImages(ctx context.Context, w models.ImageReaderWriter, folderStore f return nil } -func createImageFile(ctx context.Context, name string, folderStore file.FolderStore, fileStore file.Store) (*file.ImageFile, error) { +func createImageFile(ctx context.Context, name string, folderStore models.FolderFinderCreator, fileCreator models.FileCreator) (*models.ImageFile, error) { folderPath := filepath.Dir(name) basename := filepath.Base(name) @@ -337,14 +336,14 @@ func createImageFile(ctx context.Context, name string, folderStore file.FolderSt folderID := folder.ID - f := &file.ImageFile{ - BaseFile: &file.BaseFile{ + f := &models.ImageFile{ + BaseFile: &models.BaseFile{ Basename: basename, ParentFolderID: folderID, }, } - if err := fileStore.Create(ctx, f); err != nil { + if err := fileCreator.Create(ctx, f); err != nil { return nil, err } @@ -362,11 +361,8 @@ func makeImage(expectedResult bool) *models.Image { return o } -func createImage(ctx context.Context, w models.ImageWriter, o *models.Image, f *file.ImageFile) error { - err := w.Create(ctx, &models.ImageCreateInput{ - Image: o, - FileIDs: []file.ID{f.ID}, - }) +func createImage(ctx context.Context, w models.ImageWriter, o *models.Image, f *models.ImageFile) error { + err := w.Create(ctx, o, []models.FileID{f.ID}) if err != nil { return fmt.Errorf("Failed to create image with path '%s': %s", f.Path, err.Error()) @@ -375,12 +371,12 @@ func createImage(ctx context.Context, w models.ImageWriter, o *models.Image, f * return nil } -func createGalleries(ctx 
context.Context, w models.GalleryReaderWriter, folderStore file.FolderStore, fileStore file.Store) error { +func createGalleries(ctx context.Context, w models.GalleryReaderWriter, folderStore models.FolderFinderCreator, fileCreator models.FileCreator) error { // create the galleries galleryPatterns, falseGalleryPatterns := generateTestPaths(testName, galleryExt) for _, fn := range galleryPatterns { - f, err := createGalleryFile(ctx, fn, folderStore, fileStore) + f, err := createGalleryFile(ctx, fn, folderStore, fileCreator) if err != nil { return err } @@ -391,7 +387,7 @@ func createGalleries(ctx context.Context, w models.GalleryReaderWriter, folderSt } } for _, fn := range falseGalleryPatterns { - f, err := createGalleryFile(ctx, fn, folderStore, fileStore) + f, err := createGalleryFile(ctx, fn, folderStore, fileCreator) if err != nil { return err } @@ -404,7 +400,7 @@ func createGalleries(ctx context.Context, w models.GalleryReaderWriter, folderSt // add organized galleries for _, fn := range galleryPatterns { - f, err := createGalleryFile(ctx, "organized"+fn, folderStore, fileStore) + f, err := createGalleryFile(ctx, "organized"+fn, folderStore, fileCreator) if err != nil { return err } @@ -418,7 +414,7 @@ func createGalleries(ctx context.Context, w models.GalleryReaderWriter, folderSt } // create gallery with existing studio io - f, err := createGalleryFile(ctx, existingStudioGalleryName, folderStore, fileStore) + f, err := createGalleryFile(ctx, existingStudioGalleryName, folderStore, fileCreator) if err != nil { return err } @@ -434,7 +430,7 @@ func createGalleries(ctx context.Context, w models.GalleryReaderWriter, folderSt return nil } -func createGalleryFile(ctx context.Context, name string, folderStore file.FolderStore, fileStore file.Store) (*file.BaseFile, error) { +func createGalleryFile(ctx context.Context, name string, folderStore models.FolderFinderCreator, fileCreator models.FileCreator) (*models.BaseFile, error) { folderPath := filepath.Dir(name) 
basename := filepath.Base(name) @@ -445,12 +441,12 @@ func createGalleryFile(ctx context.Context, name string, folderStore file.Folder folderID := folder.ID - f := &file.BaseFile{ + f := &models.BaseFile{ Basename: basename, ParentFolderID: folderID, } - if err := fileStore.Create(ctx, f); err != nil { + if err := fileCreator.Create(ctx, f); err != nil { return nil, err } @@ -468,8 +464,8 @@ func makeGallery(expectedResult bool) *models.Gallery { return o } -func createGallery(ctx context.Context, w models.GalleryWriter, o *models.Gallery, f *file.BaseFile) error { - err := w.Create(ctx, o, []file.ID{f.ID}) +func createGallery(ctx context.Context, w models.GalleryWriter, o *models.Gallery, f *models.BaseFile) error { + err := w.Create(ctx, o, []models.FileID{f.ID}) if err != nil { return fmt.Errorf("Failed to create gallery with path '%s': %s", f.Path, err.Error()) } diff --git a/internal/autotag/performer.go b/internal/autotag/performer.go index 32364dc5099..cc839f361c6 100644 --- a/internal/autotag/performer.go +++ b/internal/autotag/performer.go @@ -13,21 +13,21 @@ import ( ) type SceneQueryPerformerUpdater interface { - scene.Queryer + models.SceneQueryer models.PerformerIDLoader - scene.PartialUpdater + models.SceneUpdater } type ImageQueryPerformerUpdater interface { - image.Queryer + models.ImageQueryer models.PerformerIDLoader - image.PartialUpdater + models.ImageUpdater } type GalleryQueryPerformerUpdater interface { - gallery.Queryer + models.GalleryQueryer models.PerformerIDLoader - gallery.PartialUpdater + models.GalleryUpdater } func getPerformerTaggers(p *models.Performer, cache *match.Cache) []tagger { diff --git a/internal/autotag/performer_test.go b/internal/autotag/performer_test.go index 5f7b12c228d..aa0a43d92f8 100644 --- a/internal/autotag/performer_test.go +++ b/internal/autotag/performer_test.go @@ -89,12 +89,18 @@ func testPerformerScenes(t *testing.T, performerName, expectedRegex string) { for i := range matchingPaths { sceneID := i + 1 - 
mockSceneReader.On("UpdatePartial", mock.Anything, sceneID, models.ScenePartial{ - PerformerIDs: &models.UpdateIDs{ - IDs: []int{performerID}, - Mode: models.RelationshipUpdateModeAdd, - }, - }).Return(nil, nil).Once() + + matchPartial := mock.MatchedBy(func(got models.ScenePartial) bool { + expected := models.ScenePartial{ + PerformerIDs: &models.UpdateIDs{ + IDs: []int{performerID}, + Mode: models.RelationshipUpdateModeAdd, + }, + } + + return scenePartialsEqual(got, expected) + }) + mockSceneReader.On("UpdatePartial", mock.Anything, sceneID, matchPartial).Return(nil, nil).Once() } tagger := Tagger{ @@ -178,12 +184,18 @@ func testPerformerImages(t *testing.T, performerName, expectedRegex string) { for i := range matchingPaths { imageID := i + 1 - mockImageReader.On("UpdatePartial", mock.Anything, imageID, models.ImagePartial{ - PerformerIDs: &models.UpdateIDs{ - IDs: []int{performerID}, - Mode: models.RelationshipUpdateModeAdd, - }, - }).Return(nil, nil).Once() + + matchPartial := mock.MatchedBy(func(got models.ImagePartial) bool { + expected := models.ImagePartial{ + PerformerIDs: &models.UpdateIDs{ + IDs: []int{performerID}, + Mode: models.RelationshipUpdateModeAdd, + }, + } + + return imagePartialsEqual(got, expected) + }) + mockImageReader.On("UpdatePartial", mock.Anything, imageID, matchPartial).Return(nil, nil).Once() } tagger := Tagger{ @@ -267,12 +279,18 @@ func testPerformerGalleries(t *testing.T, performerName, expectedRegex string) { for i := range matchingPaths { galleryID := i + 1 - mockGalleryReader.On("UpdatePartial", mock.Anything, galleryID, models.GalleryPartial{ - PerformerIDs: &models.UpdateIDs{ - IDs: []int{performerID}, - Mode: models.RelationshipUpdateModeAdd, - }, - }).Return(nil, nil).Once() + + matchPartial := mock.MatchedBy(func(got models.GalleryPartial) bool { + expected := models.GalleryPartial{ + PerformerIDs: &models.UpdateIDs{ + IDs: []int{performerID}, + Mode: models.RelationshipUpdateModeAdd, + }, + } + + return 
galleryPartialsEqual(got, expected) + }) + mockGalleryReader.On("UpdatePartial", mock.Anything, galleryID, matchPartial).Return(nil, nil).Once() } tagger := Tagger{ diff --git a/internal/autotag/scene.go b/internal/autotag/scene.go index 285ff7d7dde..6095905e812 100644 --- a/internal/autotag/scene.go +++ b/internal/autotag/scene.go @@ -9,14 +9,19 @@ import ( "github.com/stashapp/stash/pkg/sliceutil/intslice" ) +type SceneFinderUpdater interface { + models.SceneQueryer + models.SceneUpdater +} + type ScenePerformerUpdater interface { models.PerformerIDLoader - scene.PartialUpdater + models.SceneUpdater } type SceneTagUpdater interface { models.TagIDLoader - scene.PartialUpdater + models.SceneUpdater } func getSceneFileTagger(s *models.Scene, cache *match.Cache) tagger { @@ -30,7 +35,7 @@ func getSceneFileTagger(s *models.Scene, cache *match.Cache) tagger { } // ScenePerformers tags the provided scene with performers whose name matches the scene's path. -func ScenePerformers(ctx context.Context, s *models.Scene, rw ScenePerformerUpdater, performerReader match.PerformerAutoTagQueryer, cache *match.Cache) error { +func ScenePerformers(ctx context.Context, s *models.Scene, rw ScenePerformerUpdater, performerReader models.PerformerAutoTagQueryer, cache *match.Cache) error { t := getSceneFileTagger(s, cache) return t.tagPerformers(ctx, performerReader, func(subjectID, otherID int) (bool, error) { @@ -54,7 +59,7 @@ func ScenePerformers(ctx context.Context, s *models.Scene, rw ScenePerformerUpda // SceneStudios tags the provided scene with the first studio whose name matches the scene's path. // // Scenes will not be tagged if studio is already set. 
-func SceneStudios(ctx context.Context, s *models.Scene, rw SceneFinderUpdater, studioReader match.StudioAutoTagQueryer, cache *match.Cache) error { +func SceneStudios(ctx context.Context, s *models.Scene, rw SceneFinderUpdater, studioReader models.StudioAutoTagQueryer, cache *match.Cache) error { if s.StudioID != nil { // don't modify return nil @@ -68,7 +73,7 @@ func SceneStudios(ctx context.Context, s *models.Scene, rw SceneFinderUpdater, s } // SceneTags tags the provided scene with tags whose name matches the scene's path. -func SceneTags(ctx context.Context, s *models.Scene, rw SceneTagUpdater, tagReader match.TagAutoTagQueryer, cache *match.Cache) error { +func SceneTags(ctx context.Context, s *models.Scene, rw SceneTagUpdater, tagReader models.TagAutoTagQueryer, cache *match.Cache) error { t := getSceneFileTagger(s, cache) return t.tagTags(ctx, tagReader, func(subjectID, otherID int) (bool, error) { diff --git a/internal/autotag/scene_test.go b/internal/autotag/scene_test.go index 19ae15c9cce..a714c364c41 100644 --- a/internal/autotag/scene_test.go +++ b/internal/autotag/scene_test.go @@ -29,6 +29,19 @@ var testEndSeparators = []string{ ",", } +// asserts that got == expected +// ignores expected.UpdatedAt, but ensures that got.UpdatedAt is set and not null +func scenePartialsEqual(got, expected models.ScenePartial) bool { + // updated at should be set and not null + if !got.UpdatedAt.Set || got.UpdatedAt.Null { + return false + } + // else ignore the exact value + got.UpdatedAt = models.OptionalTime{} + + return assert.ObjectsAreEqual(got, expected) +} + func generateNamePatterns(name, separator, ext string) []string { var ret []string ret = append(ret, fmt.Sprintf("%s%saaa.%s", name, separator, ext)) @@ -182,12 +195,17 @@ func TestScenePerformers(t *testing.T) { } if test.Matches { - mockSceneReader.On("UpdatePartial", testCtx, sceneID, models.ScenePartial{ - PerformerIDs: &models.UpdateIDs{ - IDs: []int{performerID}, - Mode: 
models.RelationshipUpdateModeAdd, - }, - }).Return(nil, nil).Once() + matchPartial := mock.MatchedBy(func(got models.ScenePartial) bool { + expected := models.ScenePartial{ + PerformerIDs: &models.UpdateIDs{ + IDs: []int{performerID}, + Mode: models.RelationshipUpdateModeAdd, + }, + } + + return scenePartialsEqual(got, expected) + }) + mockSceneReader.On("UpdatePartial", testCtx, sceneID, matchPartial).Return(nil, nil).Once() } err := ScenePerformers(testCtx, &scene, mockSceneReader, mockPerformerReader, nil) @@ -224,10 +242,14 @@ func TestSceneStudios(t *testing.T) { doTest := func(mockStudioReader *mocks.StudioReaderWriter, mockSceneReader *mocks.SceneReaderWriter, test pathTestTable) { if test.Matches { - expectedStudioID := studioID - mockSceneReader.On("UpdatePartial", testCtx, sceneID, models.ScenePartial{ - StudioID: models.NewOptionalInt(expectedStudioID), - }).Return(nil, nil).Once() + matchPartial := mock.MatchedBy(func(got models.ScenePartial) bool { + expected := models.ScenePartial{ + StudioID: models.NewOptionalInt(studioID), + } + + return scenePartialsEqual(got, expected) + }) + mockSceneReader.On("UpdatePartial", testCtx, sceneID, matchPartial).Return(nil, nil).Once() } scene := models.Scene{ @@ -295,12 +317,17 @@ func TestSceneTags(t *testing.T) { doTest := func(mockTagReader *mocks.TagReaderWriter, mockSceneReader *mocks.SceneReaderWriter, test pathTestTable) { if test.Matches { - mockSceneReader.On("UpdatePartial", testCtx, sceneID, models.ScenePartial{ - TagIDs: &models.UpdateIDs{ - IDs: []int{tagID}, - Mode: models.RelationshipUpdateModeAdd, - }, - }).Return(nil, nil).Once() + matchPartial := mock.MatchedBy(func(got models.ScenePartial) bool { + expected := models.ScenePartial{ + TagIDs: &models.UpdateIDs{ + IDs: []int{tagID}, + Mode: models.RelationshipUpdateModeAdd, + }, + } + + return scenePartialsEqual(got, expected) + }) + mockSceneReader.On("UpdatePartial", testCtx, sceneID, matchPartial).Return(nil, nil).Once() } scene := models.Scene{ 
diff --git a/internal/autotag/studio.go b/internal/autotag/studio.go index bfa6c941e64..8312e0edf61 100644 --- a/internal/autotag/studio.go +++ b/internal/autotag/studio.go @@ -3,27 +3,23 @@ package autotag import ( "context" - "github.com/stashapp/stash/pkg/gallery" - "github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/match" "github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/scene" "github.com/stashapp/stash/pkg/txn" ) // the following functions aren't used in Tagger because they assume // use within a transaction -func addSceneStudio(ctx context.Context, sceneWriter scene.PartialUpdater, o *models.Scene, studioID int) (bool, error) { +func addSceneStudio(ctx context.Context, sceneWriter models.SceneUpdater, o *models.Scene, studioID int) (bool, error) { // don't set if already set if o.StudioID != nil { return false, nil } // set the studio id - scenePartial := models.ScenePartial{ - StudioID: models.NewOptionalInt(studioID), - } + scenePartial := models.NewScenePartial() + scenePartial.StudioID = models.NewOptionalInt(studioID) if _, err := sceneWriter.UpdatePartial(ctx, o.ID, scenePartial); err != nil { return false, err @@ -31,16 +27,15 @@ func addSceneStudio(ctx context.Context, sceneWriter scene.PartialUpdater, o *mo return true, nil } -func addImageStudio(ctx context.Context, imageWriter image.PartialUpdater, i *models.Image, studioID int) (bool, error) { +func addImageStudio(ctx context.Context, imageWriter models.ImageUpdater, i *models.Image, studioID int) (bool, error) { // don't set if already set if i.StudioID != nil { return false, nil } // set the studio id - imagePartial := models.ImagePartial{ - StudioID: models.NewOptionalInt(studioID), - } + imagePartial := models.NewImagePartial() + imagePartial.StudioID = models.NewOptionalInt(studioID) if _, err := imageWriter.UpdatePartial(ctx, i.ID, imagePartial); err != nil { return false, err @@ -55,9 +50,8 @@ func addGalleryStudio(ctx context.Context, galleryWriter 
GalleryFinderUpdater, o } // set the studio id - galleryPartial := models.GalleryPartial{ - StudioID: models.NewOptionalInt(studioID), - } + galleryPartial := models.NewGalleryPartial() + galleryPartial.StudioID = models.NewOptionalInt(studioID) if _, err := galleryWriter.UpdatePartial(ctx, o.ID, galleryPartial); err != nil { return false, err @@ -84,11 +78,6 @@ func getStudioTagger(p *models.Studio, aliases []string, cache *match.Cache) []t return ret } -type SceneFinderUpdater interface { - scene.Queryer - scene.PartialUpdater -} - // StudioScenes searches for scenes whose path matches the provided studio name and tags the scene with the studio, if studio is not already set on the scene. func (tagger *Tagger) StudioScenes(ctx context.Context, p *models.Studio, paths []string, aliases []string, rw SceneFinderUpdater) error { t := getStudioTagger(p, aliases, tagger.Cache) @@ -101,9 +90,8 @@ func (tagger *Tagger) StudioScenes(ctx context.Context, p *models.Studio, paths } // set the studio id - scenePartial := models.ScenePartial{ - StudioID: models.NewOptionalInt(p.ID), - } + scenePartial := models.NewScenePartial() + scenePartial.StudioID = models.NewOptionalInt(p.ID) if err := txn.WithTxn(ctx, tagger.TxnManager, func(ctx context.Context) error { _, err := rw.UpdatePartial(ctx, o.ID, scenePartial) @@ -120,12 +108,6 @@ func (tagger *Tagger) StudioScenes(ctx context.Context, p *models.Studio, paths return nil } -type ImageFinderUpdater interface { - image.Queryer - Find(ctx context.Context, id int) (*models.Image, error) - UpdatePartial(ctx context.Context, id int, partial models.ImagePartial) (*models.Image, error) -} - // StudioImages searches for images whose path matches the provided studio name and tags the image with the studio, if studio is not already set on the image. 
func (tagger *Tagger) StudioImages(ctx context.Context, p *models.Studio, paths []string, aliases []string, rw ImageFinderUpdater) error { t := getStudioTagger(p, aliases, tagger.Cache) @@ -138,9 +120,8 @@ func (tagger *Tagger) StudioImages(ctx context.Context, p *models.Studio, paths } // set the studio id - imagePartial := models.ImagePartial{ - StudioID: models.NewOptionalInt(p.ID), - } + imagePartial := models.NewImagePartial() + imagePartial.StudioID = models.NewOptionalInt(p.ID) if err := txn.WithTxn(ctx, tagger.TxnManager, func(ctx context.Context) error { _, err := rw.UpdatePartial(ctx, i.ID, imagePartial) @@ -157,12 +138,6 @@ func (tagger *Tagger) StudioImages(ctx context.Context, p *models.Studio, paths return nil } -type GalleryFinderUpdater interface { - gallery.Queryer - gallery.PartialUpdater - Find(ctx context.Context, id int) (*models.Gallery, error) -} - // StudioGalleries searches for galleries whose path matches the provided studio name and tags the gallery with the studio, if studio is not already set on the gallery. 
func (tagger *Tagger) StudioGalleries(ctx context.Context, p *models.Studio, paths []string, aliases []string, rw GalleryFinderUpdater) error { t := getStudioTagger(p, aliases, tagger.Cache) @@ -175,9 +150,8 @@ func (tagger *Tagger) StudioGalleries(ctx context.Context, p *models.Studio, pat } // set the studio id - galleryPartial := models.GalleryPartial{ - StudioID: models.NewOptionalInt(p.ID), - } + galleryPartial := models.NewGalleryPartial() + galleryPartial.StudioID = models.NewOptionalInt(p.ID) if err := txn.WithTxn(ctx, tagger.TxnManager, func(ctx context.Context) error { _, err := rw.UpdatePartial(ctx, o.ID, galleryPartial) diff --git a/internal/autotag/studio_test.go b/internal/autotag/studio_test.go index 3e9eae5f5fb..aa52c9c5179 100644 --- a/internal/autotag/studio_test.go +++ b/internal/autotag/studio_test.go @@ -151,10 +151,15 @@ func testStudioScenes(t *testing.T, tc testStudioCase) { for i := range matchingPaths { sceneID := i + 1 - expectedStudioID := studioID - mockSceneReader.On("UpdatePartial", mock.Anything, sceneID, models.ScenePartial{ - StudioID: models.NewOptionalInt(expectedStudioID), - }).Return(nil, nil).Once() + + matchPartial := mock.MatchedBy(func(got models.ScenePartial) bool { + expected := models.ScenePartial{ + StudioID: models.NewOptionalInt(studioID), + } + + return scenePartialsEqual(got, expected) + }) + mockSceneReader.On("UpdatePartial", mock.Anything, sceneID, matchPartial).Return(nil, nil).Once() } tagger := Tagger{ @@ -249,10 +254,15 @@ func testStudioImages(t *testing.T, tc testStudioCase) { for i := range matchingPaths { imageID := i + 1 - expectedStudioID := studioID - mockImageReader.On("UpdatePartial", mock.Anything, imageID, models.ImagePartial{ - StudioID: models.NewOptionalInt(expectedStudioID), - }).Return(nil, nil).Once() + + matchPartial := mock.MatchedBy(func(got models.ImagePartial) bool { + expected := models.ImagePartial{ + StudioID: models.NewOptionalInt(studioID), + } + + return imagePartialsEqual(got, 
expected) + }) + mockImageReader.On("UpdatePartial", mock.Anything, imageID, matchPartial).Return(nil, nil).Once() } tagger := Tagger{ @@ -346,10 +356,15 @@ func testStudioGalleries(t *testing.T, tc testStudioCase) { for i := range matchingPaths { galleryID := i + 1 - expectedStudioID := studioID - mockGalleryReader.On("UpdatePartial", mock.Anything, galleryID, models.GalleryPartial{ - StudioID: models.NewOptionalInt(expectedStudioID), - }).Return(nil, nil).Once() + + matchPartial := mock.MatchedBy(func(got models.GalleryPartial) bool { + expected := models.GalleryPartial{ + StudioID: models.NewOptionalInt(studioID), + } + + return galleryPartialsEqual(got, expected) + }) + mockGalleryReader.On("UpdatePartial", mock.Anything, galleryID, matchPartial).Return(nil, nil).Once() } tagger := Tagger{ diff --git a/internal/autotag/tag.go b/internal/autotag/tag.go index 94c7c1bb335..8c404f62f28 100644 --- a/internal/autotag/tag.go +++ b/internal/autotag/tag.go @@ -13,21 +13,21 @@ import ( ) type SceneQueryTagUpdater interface { - scene.Queryer + models.SceneQueryer models.TagIDLoader - scene.PartialUpdater + models.SceneUpdater } type ImageQueryTagUpdater interface { - image.Queryer + models.ImageQueryer models.TagIDLoader - image.PartialUpdater + models.ImageUpdater } type GalleryQueryTagUpdater interface { - gallery.Queryer + models.GalleryQueryer models.TagIDLoader - gallery.PartialUpdater + models.GalleryUpdater } func getTagTaggers(p *models.Tag, aliases []string, cache *match.Cache) []tagger { diff --git a/internal/autotag/tag_test.go b/internal/autotag/tag_test.go index 04f10875c2e..4b183200490 100644 --- a/internal/autotag/tag_test.go +++ b/internal/autotag/tag_test.go @@ -151,12 +151,18 @@ func testTagScenes(t *testing.T, tc testTagCase) { for i := range matchingPaths { sceneID := i + 1 - mockSceneReader.On("UpdatePartial", mock.Anything, sceneID, models.ScenePartial{ - TagIDs: &models.UpdateIDs{ - IDs: []int{tagID}, - Mode: models.RelationshipUpdateModeAdd, - }, - 
}).Return(nil, nil).Once() + + matchPartial := mock.MatchedBy(func(got models.ScenePartial) bool { + expected := models.ScenePartial{ + TagIDs: &models.UpdateIDs{ + IDs: []int{tagID}, + Mode: models.RelationshipUpdateModeAdd, + }, + } + + return scenePartialsEqual(got, expected) + }) + mockSceneReader.On("UpdatePartial", mock.Anything, sceneID, matchPartial).Return(nil, nil).Once() } tagger := Tagger{ @@ -253,12 +259,17 @@ func testTagImages(t *testing.T, tc testTagCase) { for i := range matchingPaths { imageID := i + 1 - mockImageReader.On("UpdatePartial", mock.Anything, imageID, models.ImagePartial{ - TagIDs: &models.UpdateIDs{ - IDs: []int{tagID}, - Mode: models.RelationshipUpdateModeAdd, - }, - }).Return(nil, nil).Once() + matchPartial := mock.MatchedBy(func(got models.ImagePartial) bool { + expected := models.ImagePartial{ + TagIDs: &models.UpdateIDs{ + IDs: []int{tagID}, + Mode: models.RelationshipUpdateModeAdd, + }, + } + + return imagePartialsEqual(got, expected) + }) + mockImageReader.On("UpdatePartial", mock.Anything, imageID, matchPartial).Return(nil, nil).Once() } tagger := Tagger{ @@ -355,12 +366,17 @@ func testTagGalleries(t *testing.T, tc testTagCase) { for i := range matchingPaths { galleryID := i + 1 - mockGalleryReader.On("UpdatePartial", mock.Anything, galleryID, models.GalleryPartial{ - TagIDs: &models.UpdateIDs{ - IDs: []int{tagID}, - Mode: models.RelationshipUpdateModeAdd, - }, - }).Return(nil, nil).Once() + matchPartial := mock.MatchedBy(func(got models.GalleryPartial) bool { + expected := models.GalleryPartial{ + TagIDs: &models.UpdateIDs{ + IDs: []int{tagID}, + Mode: models.RelationshipUpdateModeAdd, + }, + } + + return galleryPartialsEqual(got, expected) + }) + mockGalleryReader.On("UpdatePartial", mock.Anything, galleryID, matchPartial).Return(nil, nil).Once() } diff --git a/internal/autotag/tagger.go b/internal/autotag/tagger.go index 07cb1da87d3..b814bea608f 100644 --- a/internal/autotag/tagger.go +++ b/internal/autotag/tagger.go @@ 
-17,12 +17,9 @@ import ( "context" "fmt" - "github.com/stashapp/stash/pkg/gallery" - "github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/match" "github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/scene" "github.com/stashapp/stash/pkg/txn" ) @@ -54,7 +51,7 @@ func (t *tagger) addLog(otherType, otherName string) { logger.Infof("Added %s '%s' to %s '%s'", otherType, otherName, t.Type, t.Name) } -func (t *tagger) tagPerformers(ctx context.Context, performerReader match.PerformerAutoTagQueryer, addFunc addLinkFunc) error { +func (t *tagger) tagPerformers(ctx context.Context, performerReader models.PerformerAutoTagQueryer, addFunc addLinkFunc) error { others, err := match.PathToPerformers(ctx, t.Path, performerReader, t.cache, t.trimExt) if err != nil { return err @@ -75,7 +72,7 @@ func (t *tagger) tagPerformers(ctx context.Context, performerReader match.Perfor return nil } -func (t *tagger) tagStudios(ctx context.Context, studioReader match.StudioAutoTagQueryer, addFunc addLinkFunc) error { +func (t *tagger) tagStudios(ctx context.Context, studioReader models.StudioAutoTagQueryer, addFunc addLinkFunc) error { studio, err := match.PathToStudio(ctx, t.Path, studioReader, t.cache, t.trimExt) if err != nil { return err @@ -96,7 +93,7 @@ func (t *tagger) tagStudios(ctx context.Context, studioReader match.StudioAutoTa return nil } -func (t *tagger) tagTags(ctx context.Context, tagReader match.TagAutoTagQueryer, addFunc addLinkFunc) error { +func (t *tagger) tagTags(ctx context.Context, tagReader models.TagAutoTagQueryer, addFunc addLinkFunc) error { others, err := match.PathToTags(ctx, t.Path, tagReader, t.cache, t.trimExt) if err != nil { return err @@ -117,7 +114,7 @@ func (t *tagger) tagTags(ctx context.Context, tagReader match.TagAutoTagQueryer, return nil } -func (t *tagger) tagScenes(ctx context.Context, paths []string, sceneReader scene.Queryer, addFunc addSceneLinkFunc) error { +func (t 
*tagger) tagScenes(ctx context.Context, paths []string, sceneReader models.SceneQueryer, addFunc addSceneLinkFunc) error { return match.PathToScenesFn(ctx, t.Name, paths, sceneReader, func(ctx context.Context, p *models.Scene) error { added, err := addFunc(p) @@ -133,7 +130,7 @@ func (t *tagger) tagScenes(ctx context.Context, paths []string, sceneReader scen }) } -func (t *tagger) tagImages(ctx context.Context, paths []string, imageReader image.Queryer, addFunc addImageLinkFunc) error { +func (t *tagger) tagImages(ctx context.Context, paths []string, imageReader models.ImageQueryer, addFunc addImageLinkFunc) error { return match.PathToImagesFn(ctx, t.Name, paths, imageReader, func(ctx context.Context, p *models.Image) error { added, err := addFunc(p) @@ -149,7 +146,7 @@ func (t *tagger) tagImages(ctx context.Context, paths []string, imageReader imag }) } -func (t *tagger) tagGalleries(ctx context.Context, paths []string, galleryReader gallery.Queryer, addFunc addGalleryLinkFunc) error { +func (t *tagger) tagGalleries(ctx context.Context, paths []string, galleryReader models.GalleryQueryer, addFunc addGalleryLinkFunc) error { return match.PathToGalleriesFn(ctx, t.Name, paths, galleryReader, func(ctx context.Context, p *models.Gallery) error { added, err := addFunc(p) diff --git a/internal/dlna/cds.go b/internal/dlna/cds.go index 826b52acd66..eba98ac489f 100644 --- a/internal/dlna/cds.go +++ b/internal/dlna/cds.go @@ -363,7 +363,7 @@ func (me *contentDirectoryService) handleBrowseMetadata(obj object, host string) if err := txn.WithReadTxn(context.TODO(), me.txnManager, func(ctx context.Context) error { scene, err = me.repository.SceneFinder.Find(ctx, sceneID) if scene != nil { - err = scene.LoadPrimaryFile(ctx, me.repository.FileFinder) + err = scene.LoadPrimaryFile(ctx, me.repository.FileGetter) } if err != nil { @@ -478,7 +478,7 @@ func (me *contentDirectoryService) getVideos(sceneFilter *models.SceneFilterType } } else { for _, s := range scenes { - if err := 
s.LoadPrimaryFile(ctx, me.repository.FileFinder); err != nil { + if err := s.LoadPrimaryFile(ctx, me.repository.FileGetter); err != nil { return err } @@ -506,7 +506,7 @@ func (me *contentDirectoryService) getPageVideos(sceneFilter *models.SceneFilter sort := me.VideoSortOrder direction := getSortDirection(sceneFilter, sort) var err error - objs, err = pager.getPageVideos(ctx, me.repository.SceneFinder, me.repository.FileFinder, page, host, sort, direction) + objs, err = pager.getPageVideos(ctx, me.repository.SceneFinder, me.repository.FileGetter, page, host, sort, direction) if err != nil { return err } diff --git a/internal/dlna/dms.go b/internal/dlna/dms.go index 502dbe0e44e..fe078aab022 100644 --- a/internal/dlna/dms.go +++ b/internal/dlna/dms.go @@ -48,13 +48,12 @@ import ( "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/scene" "github.com/stashapp/stash/pkg/txn" ) type SceneFinder interface { - scene.Queryer - scene.IDFinder + models.SceneGetter + models.SceneQueryer } type StudioFinder interface { diff --git a/internal/dlna/paging.go b/internal/dlna/paging.go index bd1b0028375..fae6ebf1360 100644 --- a/internal/dlna/paging.go +++ b/internal/dlna/paging.go @@ -6,7 +6,6 @@ import ( "math" "strconv" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/scene" ) @@ -20,7 +19,7 @@ func (p *scenePager) getPageID(page int) string { return p.parentID + "/page/" + strconv.Itoa(page) } -func (p *scenePager) getPages(ctx context.Context, r scene.Queryer, total int) ([]interface{}, error) { +func (p *scenePager) getPages(ctx context.Context, r models.SceneQueryer, total int) ([]interface{}, error) { var objs []interface{} // get the first scene of each page to set an appropriate title @@ -60,7 +59,7 @@ func (p *scenePager) getPages(ctx context.Context, r scene.Queryer, total int) ( return objs, nil } -func (p *scenePager) getPageVideos(ctx 
context.Context, r SceneFinder, f file.Finder, page int, host string, sort string, direction models.SortDirectionEnum) ([]interface{}, error) { +func (p *scenePager) getPageVideos(ctx context.Context, r SceneFinder, f models.FileGetter, page int, host string, sort string, direction models.SortDirectionEnum) ([]interface{}, error) { var objs []interface{} findFilter := &models.FindFilterType{ diff --git a/internal/dlna/service.go b/internal/dlna/service.go index 0d8932e0803..d5399e6a11e 100644 --- a/internal/dlna/service.go +++ b/internal/dlna/service.go @@ -8,7 +8,6 @@ import ( "sync" "time" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/txn" @@ -16,7 +15,7 @@ import ( type Repository struct { SceneFinder SceneFinder - FileFinder file.Finder + FileGetter models.FileGetter StudioFinder StudioFinder TagFinder TagFinder PerformerFinder PerformerFinder diff --git a/internal/identify/identify.go b/internal/identify/identify.go index 3a9cea6107e..db8ca2f54ab 100644 --- a/internal/identify/identify.go +++ b/internal/identify/identify.go @@ -46,7 +46,7 @@ type SceneIdentifier struct { SceneReaderUpdater SceneReaderUpdater StudioReaderWriter models.StudioReaderWriter PerformerCreator PerformerCreator - TagCreatorFinder TagCreatorFinder + TagFinderCreator models.TagFinderCreator DefaultOptions *MetadataOptions Sources []ScraperSource @@ -176,7 +176,7 @@ func (t *SceneIdentifier) getSceneUpdater(ctx context.Context, s *models.Scene, sceneReader: t.SceneReaderUpdater, studioReaderWriter: t.StudioReaderWriter, performerCreator: t.PerformerCreator, - tagCreatorFinder: t.TagCreatorFinder, + tagCreator: t.TagFinderCreator, scene: s, result: result, fieldOptions: fieldOptions, @@ -332,7 +332,7 @@ func (t *SceneIdentifier) addTagToScene(ctx context.Context, txnManager txn.Mana return err } - ret, err := t.TagCreatorFinder.Find(ctx, tagID) + ret, err := t.TagFinderCreator.Find(ctx, 
tagID) if err != nil { logger.Infof("Added tag id %s to skipped scene %s", tagToAdd, s.Path) } else { diff --git a/internal/identify/identify_test.go b/internal/identify/identify_test.go index 30dd72803fb..04ff0360765 100644 --- a/internal/identify/identify_test.go +++ b/internal/identify/identify_test.go @@ -186,7 +186,7 @@ func TestSceneIdentifier_Identify(t *testing.T) { t.Run(tt.name, func(t *testing.T) { identifier := SceneIdentifier{ SceneReaderUpdater: mockSceneReaderWriter, - TagCreatorFinder: mockTagFinderCreator, + TagFinderCreator: mockTagFinderCreator, DefaultOptions: defaultOptions, Sources: sources, SceneUpdatePostHookExecutor: mockHookExecutor{}, diff --git a/internal/identify/performer.go b/internal/identify/performer.go index f544473d2b2..947bb09d6f8 100644 --- a/internal/identify/performer.go +++ b/internal/identify/performer.go @@ -10,7 +10,7 @@ import ( ) type PerformerCreator interface { - Create(ctx context.Context, newPerformer *models.Performer) error + models.PerformerCreator UpdateImage(ctx context.Context, performerID int, image []byte) error } diff --git a/internal/identify/scene.go b/internal/identify/scene.go index 160a0a8b646..eec8ce6edc2 100644 --- a/internal/identify/scene.go +++ b/internal/identify/scene.go @@ -7,36 +7,32 @@ import ( "fmt" "strconv" "strings" - "time" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/scene" "github.com/stashapp/stash/pkg/sliceutil" "github.com/stashapp/stash/pkg/sliceutil/intslice" - "github.com/stashapp/stash/pkg/tag" "github.com/stashapp/stash/pkg/utils" ) -type SceneReaderUpdater interface { +type SceneCoverGetter interface { GetCover(ctx context.Context, sceneID int) ([]byte, error) - scene.Updater +} + +type SceneReaderUpdater interface { + SceneCoverGetter + models.SceneUpdater models.PerformerIDLoader models.TagIDLoader models.StashIDLoader models.URLLoader } -type TagCreatorFinder interface { - Create(ctx context.Context, newTag 
*models.Tag) error - tag.Finder -} - type sceneRelationships struct { - sceneReader SceneReaderUpdater + sceneReader SceneCoverGetter studioReaderWriter models.StudioReaderWriter performerCreator PerformerCreator - tagCreatorFinder TagCreatorFinder + tagCreator models.TagCreator scene *models.Scene result *scrapeResult fieldOptions map[string]*FieldOptions @@ -167,13 +163,10 @@ func (g sceneRelationships) tags(ctx context.Context) ([]int, error) { tagIDs = intslice.IntAppendUnique(tagIDs, int(tagID)) } else if createMissing { - now := time.Now() - newTag := models.Tag{ - Name: t.Name, - CreatedAt: now, - UpdatedAt: now, - } - err := g.tagCreatorFinder.Create(ctx, &newTag) + newTag := models.NewTag() + newTag.Name = t.Name + + err := g.tagCreator.Create(ctx, &newTag) if err != nil { return nil, fmt.Errorf("error creating tag: %w", err) } diff --git a/internal/identify/scene_test.go b/internal/identify/scene_test.go index ae6963ee379..bb0598b060a 100644 --- a/internal/identify/scene_test.go +++ b/internal/identify/scene_test.go @@ -377,9 +377,9 @@ func Test_sceneRelationships_tags(t *testing.T) { })).Return(errors.New("error creating tag")) tr := sceneRelationships{ - sceneReader: mockSceneReaderWriter, - tagCreatorFinder: mockTagReaderWriter, - fieldOptions: make(map[string]*FieldOptions), + sceneReader: mockSceneReaderWriter, + tagCreator: mockTagReaderWriter, + fieldOptions: make(map[string]*FieldOptions), } tests := []struct { diff --git a/internal/identify/studio.go b/internal/identify/studio.go index c822afa991e..d05967bc4f2 100644 --- a/internal/identify/studio.go +++ b/internal/identify/studio.go @@ -39,7 +39,13 @@ func createMissingStudio(ctx context.Context, endpoint string, w models.StudioRe s.Parent.StoredID = &storedId } else { // The parent studio matched an existing one and the user has chosen in the UI to link and/or update it - existingStashIDs := getStashIDsForStudio(ctx, *s.Parent.StoredID, w) + storedID, _ := strconv.Atoi(*s.Parent.StoredID) + + 
existingStashIDs, err := w.GetStashIDs(ctx, storedID) + if err != nil { + return nil, err + } + studioPartial := s.Parent.ToPartial(s.Parent.StoredID, endpoint, nil, existingStashIDs) parentImage, err := s.Parent.GetImage(ctx, nil) if err != nil { @@ -83,14 +89,3 @@ func createMissingStudio(ctx context.Context, endpoint string, w models.StudioRe return &newStudio.ID, nil } - -func getStashIDsForStudio(ctx context.Context, studioID string, w models.StudioReaderWriter) []models.StashID { - id, _ := strconv.Atoi(studioID) - tempStudio := &models.Studio{ID: id} - - err := tempStudio.LoadStashIDs(ctx, w) - if err != nil { - return nil - } - return tempStudio.StashIDs.List() -} diff --git a/internal/manager/fingerprint.go b/internal/manager/fingerprint.go index fc183cc6a1b..b30ac453263 100644 --- a/internal/manager/fingerprint.go +++ b/internal/manager/fingerprint.go @@ -10,13 +10,14 @@ import ( "github.com/stashapp/stash/pkg/hash/md5" "github.com/stashapp/stash/pkg/hash/oshash" "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" ) type fingerprintCalculator struct { Config *config.Instance } -func (c *fingerprintCalculator) calculateOshash(f *file.BaseFile, o file.Opener) (*file.Fingerprint, error) { +func (c *fingerprintCalculator) calculateOshash(f *models.BaseFile, o file.Opener) (*models.Fingerprint, error) { r, err := o.Open() if err != nil { return nil, fmt.Errorf("opening file: %w", err) @@ -34,13 +35,13 @@ func (c *fingerprintCalculator) calculateOshash(f *file.BaseFile, o file.Opener) return nil, fmt.Errorf("calculating oshash: %w", err) } - return &file.Fingerprint{ - Type: file.FingerprintTypeOshash, + return &models.Fingerprint{ + Type: models.FingerprintTypeOshash, Fingerprint: hash, }, nil } -func (c *fingerprintCalculator) calculateMD5(o file.Opener) (*file.Fingerprint, error) { +func (c *fingerprintCalculator) calculateMD5(o file.Opener) (*models.Fingerprint, error) { r, err := o.Open() if err != nil { return nil, 
fmt.Errorf("opening file: %w", err) @@ -53,24 +54,24 @@ func (c *fingerprintCalculator) calculateMD5(o file.Opener) (*file.Fingerprint, return nil, fmt.Errorf("calculating md5: %w", err) } - return &file.Fingerprint{ - Type: file.FingerprintTypeMD5, + return &models.Fingerprint{ + Type: models.FingerprintTypeMD5, Fingerprint: hash, }, nil } -func (c *fingerprintCalculator) CalculateFingerprints(f *file.BaseFile, o file.Opener, useExisting bool) ([]file.Fingerprint, error) { - var ret []file.Fingerprint +func (c *fingerprintCalculator) CalculateFingerprints(f *models.BaseFile, o file.Opener, useExisting bool) ([]models.Fingerprint, error) { + var ret []models.Fingerprint calculateMD5 := true if useAsVideo(f.Path) { var ( - fp *file.Fingerprint + fp *models.Fingerprint err error ) if useExisting { - fp = f.Fingerprints.For(file.FingerprintTypeOshash) + fp = f.Fingerprints.For(models.FingerprintTypeOshash) } if fp == nil { @@ -89,12 +90,12 @@ func (c *fingerprintCalculator) CalculateFingerprints(f *file.BaseFile, o file.O if calculateMD5 { var ( - fp *file.Fingerprint + fp *models.Fingerprint err error ) if useExisting { - fp = f.Fingerprints.For(file.FingerprintTypeMD5) + fp = f.Fingerprints.For(models.FingerprintTypeMD5) } if fp == nil { diff --git a/internal/manager/manager.go b/internal/manager/manager.go index 0b1c50abe42..e199f9ce78a 100644 --- a/internal/manager/manager.go +++ b/internal/manager/manager.go @@ -26,6 +26,7 @@ import ( "github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/job" "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/paths" "github.com/stashapp/stash/pkg/plugin" "github.com/stashapp/stash/pkg/scene" @@ -222,7 +223,7 @@ func initialize() error { instance.DLNAService = dlna.NewService(instance.Repository, dlna.Repository{ SceneFinder: instance.Repository.Scene, - FileFinder: instance.Repository.File, + FileGetter: instance.Repository.File, StudioFinder: 
instance.Repository.Studio, TagFinder: instance.Repository.Tag, PerformerFinder: instance.Repository.Performer, @@ -280,15 +281,15 @@ func initialize() error { return nil } -func videoFileFilter(ctx context.Context, f file.File) bool { +func videoFileFilter(ctx context.Context, f models.File) bool { return useAsVideo(f.Base().Path) } -func imageFileFilter(ctx context.Context, f file.File) bool { +func imageFileFilter(ctx context.Context, f models.File) bool { return useAsImage(f.Base().Path) } -func galleryFileFilter(ctx context.Context, f file.File) bool { +func galleryFileFilter(ctx context.Context, f models.File) bool { return isZip(f.Base().Basename) } @@ -297,7 +298,7 @@ func makeScanner(db *sqlite.Database, pluginCache *plugin.Cache) *file.Scanner { Repository: file.Repository{ Manager: db, DatabaseProvider: db, - Store: db.File, + FileStore: db.File, FolderStore: db.Folder, }, FileDecorators: []file.Decorator{ @@ -325,7 +326,7 @@ func makeCleaner(db *sqlite.Database, pluginCache *plugin.Cache) *file.Cleaner { Repository: file.Repository{ Manager: db, DatabaseProvider: db, - Store: db.File, + FileStore: db.File, FolderStore: db.Folder, }, Handlers: []file.CleanHandler{ diff --git a/internal/manager/manager_tasks.go b/internal/manager/manager_tasks.go index e69dccf1dfa..ed4eea17116 100644 --- a/internal/manager/manager_tasks.go +++ b/internal/manager/manager_tasks.go @@ -383,8 +383,8 @@ func (s *Manager) StashBoxBatchPerformerTag(ctx context.Context, input StashBoxB } // Check if the user wants to refresh existing or new items - if (input.Refresh && len(performer.StashIDs.List()) > 0) || - (!input.Refresh && len(performer.StashIDs.List()) == 0) { + hasStashID := performer.StashIDs.ForEndpoint(box.Endpoint) != nil + if (input.Refresh && hasStashID) || (!input.Refresh && !hasStashID) { tasks = append(tasks, StashBoxBatchTagTask{ performer: performer, refresh: input.Refresh, @@ -516,8 +516,8 @@ func (s *Manager) StashBoxBatchStudioTag(ctx context.Context, input 
StashBoxBatc } // Check if the user wants to refresh existing or new items - if (input.Refresh && len(studio.StashIDs.List()) > 0) || - (!input.Refresh && len(studio.StashIDs.List()) == 0) { + hasStashID := studio.StashIDs.ForEndpoint(box.Endpoint) != nil + if (input.Refresh && hasStashID) || (!input.Refresh && !hasStashID) { tasks = append(tasks, StashBoxBatchTagTask{ studio: studio, refresh: input.Refresh, diff --git a/internal/manager/repository.go b/internal/manager/repository.go index f6f8176aa86..77859d06baa 100644 --- a/internal/manager/repository.go +++ b/internal/manager/repository.go @@ -3,8 +3,6 @@ package manager import ( "context" - "github.com/stashapp/stash/pkg/file" - "github.com/stashapp/stash/pkg/gallery" "github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/scene" @@ -12,49 +10,17 @@ import ( "github.com/stashapp/stash/pkg/txn" ) -type ImageReaderWriter interface { - models.ImageReaderWriter - image.FinderCreatorUpdater - GetManyFileIDs(ctx context.Context, ids []int) ([][]file.ID, error) -} - -type GalleryReaderWriter interface { - models.GalleryReaderWriter - gallery.FinderCreatorUpdater - gallery.Finder - models.FileLoader - GetManyFileIDs(ctx context.Context, ids []int) ([][]file.ID, error) -} - -type SceneReaderWriter interface { - models.SceneReaderWriter - scene.CreatorUpdater - models.URLLoader - GetManyFileIDs(ctx context.Context, ids []int) ([][]file.ID, error) -} - -type FileReaderWriter interface { - file.Store - Query(ctx context.Context, options models.FileQueryOptions) (*models.FileQueryResult, error) - GetCaptions(ctx context.Context, fileID file.ID) ([]*models.VideoCaption, error) - IsPrimary(ctx context.Context, fileID file.ID) (bool, error) -} - -type FolderReaderWriter interface { - file.FolderStore -} - type Repository struct { models.TxnManager - File FileReaderWriter - Folder FolderReaderWriter - Gallery GalleryReaderWriter + File models.FileReaderWriter + Folder 
models.FolderReaderWriter + Gallery models.GalleryReaderWriter GalleryChapter models.GalleryChapterReaderWriter - Image ImageReaderWriter + Image models.ImageReaderWriter Movie models.MovieReaderWriter Performer models.PerformerReaderWriter - Scene SceneReaderWriter + Scene models.SceneReaderWriter SceneMarker models.SceneMarkerReaderWriter Studio models.StudioReaderWriter Tag models.TagReaderWriter @@ -94,15 +60,15 @@ func sqliteRepository(d *sqlite.Database) Repository { } type SceneService interface { - Create(ctx context.Context, input *models.Scene, fileIDs []file.ID, coverImage []byte) (*models.Scene, error) - AssignFile(ctx context.Context, sceneID int, fileID file.ID) error + Create(ctx context.Context, input *models.Scene, fileIDs []models.FileID, coverImage []byte) (*models.Scene, error) + AssignFile(ctx context.Context, sceneID int, fileID models.FileID) error Merge(ctx context.Context, sourceIDs []int, destinationID int, values models.ScenePartial) error Destroy(ctx context.Context, scene *models.Scene, fileDeleter *scene.FileDeleter, deleteGenerated, deleteFile bool) error } type ImageService interface { Destroy(ctx context.Context, image *models.Image, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile bool) error - DestroyZipImages(ctx context.Context, zipFile file.File, fileDeleter *image.FileDeleter, deleteGenerated bool) ([]*models.Image, error) + DestroyZipImages(ctx context.Context, zipFile models.File, fileDeleter *image.FileDeleter, deleteGenerated bool) ([]*models.Image, error) } type GalleryService interface { diff --git a/internal/manager/scene.go b/internal/manager/scene.go index 39b96fec74f..ff551754ed1 100644 --- a/internal/manager/scene.go +++ b/internal/manager/scene.go @@ -6,7 +6,6 @@ import ( "github.com/stashapp/stash/internal/manager/config" "github.com/stashapp/stash/pkg/ffmpeg" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/models" ) @@ -57,7 +56,7 @@ var ( } 
) -func GetVideoFileContainer(file *file.VideoFile) (ffmpeg.Container, error) { +func GetVideoFileContainer(file *models.VideoFile) (ffmpeg.Container, error) { var container ffmpeg.Container format := file.Format if format != "" { @@ -88,7 +87,7 @@ func GetSceneStreamPaths(scene *models.Scene, directStreamURL *url.URL, maxStrea // convert StreamingResolutionEnum to ResolutionEnum maxStreamingResolution := models.ResolutionEnum(maxStreamingTranscodeSize) - sceneResolution := file.GetMinResolution(pf) + sceneResolution := models.GetMinResolution(pf) includeSceneStreamPath := func(streamingResolution models.StreamingResolutionEnum) bool { var minResolution int if streamingResolution == models.StreamingResolutionEnumOriginal { diff --git a/internal/manager/task_clean.go b/internal/manager/task_clean.go index 43cbc92d986..207c6381866 100644 --- a/internal/manager/task_clean.go +++ b/internal/manager/task_clean.go @@ -257,7 +257,7 @@ type cleanHandler struct { PluginCache *plugin.Cache } -func (h *cleanHandler) HandleFile(ctx context.Context, fileDeleter *file.Deleter, fileID file.ID) error { +func (h *cleanHandler) HandleFile(ctx context.Context, fileDeleter *file.Deleter, fileID models.FileID) error { if err := h.handleRelatedScenes(ctx, fileDeleter, fileID); err != nil { return err } @@ -271,11 +271,11 @@ func (h *cleanHandler) HandleFile(ctx context.Context, fileDeleter *file.Deleter return nil } -func (h *cleanHandler) HandleFolder(ctx context.Context, fileDeleter *file.Deleter, folderID file.FolderID) error { +func (h *cleanHandler) HandleFolder(ctx context.Context, fileDeleter *file.Deleter, folderID models.FolderID) error { return h.deleteRelatedFolderGalleries(ctx, folderID) } -func (h *cleanHandler) handleRelatedScenes(ctx context.Context, fileDeleter *file.Deleter, fileID file.ID) error { +func (h *cleanHandler) handleRelatedScenes(ctx context.Context, fileDeleter *file.Deleter, fileID models.FileID) error { mgr := GetInstance() sceneQB := mgr.Database.Scene 
scenes, err := sceneQB.FindByFileID(ctx, fileID) @@ -313,7 +313,7 @@ func (h *cleanHandler) handleRelatedScenes(ctx context.Context, fileDeleter *fil }, nil) } else { // set the primary file to a remaining file - var newPrimaryID file.ID + var newPrimaryID models.FileID for _, f := range scene.Files.List() { if f.ID != fileID { newPrimaryID = f.ID @@ -321,9 +321,10 @@ func (h *cleanHandler) handleRelatedScenes(ctx context.Context, fileDeleter *fil } } - if _, err := mgr.Repository.Scene.UpdatePartial(ctx, scene.ID, models.ScenePartial{ - PrimaryFileID: &newPrimaryID, - }); err != nil { + scenePartial := models.NewScenePartial() + scenePartial.PrimaryFileID = &newPrimaryID + + if _, err := mgr.Repository.Scene.UpdatePartial(ctx, scene.ID, scenePartial); err != nil { return err } } @@ -332,7 +333,7 @@ func (h *cleanHandler) handleRelatedScenes(ctx context.Context, fileDeleter *fil return nil } -func (h *cleanHandler) handleRelatedGalleries(ctx context.Context, fileID file.ID) error { +func (h *cleanHandler) handleRelatedGalleries(ctx context.Context, fileID models.FileID) error { mgr := GetInstance() qb := mgr.Database.Gallery galleries, err := qb.FindByFileID(ctx, fileID) @@ -358,7 +359,7 @@ func (h *cleanHandler) handleRelatedGalleries(ctx context.Context, fileID file.I }, nil) } else { // set the primary file to a remaining file - var newPrimaryID file.ID + var newPrimaryID models.FileID for _, f := range g.Files.List() { if f.Base().ID != fileID { newPrimaryID = f.Base().ID @@ -366,9 +367,10 @@ func (h *cleanHandler) handleRelatedGalleries(ctx context.Context, fileID file.I } } - if _, err := mgr.Repository.Gallery.UpdatePartial(ctx, g.ID, models.GalleryPartial{ - PrimaryFileID: &newPrimaryID, - }); err != nil { + galleryPartial := models.NewGalleryPartial() + galleryPartial.PrimaryFileID = &newPrimaryID + + if _, err := mgr.Repository.Gallery.UpdatePartial(ctx, g.ID, galleryPartial); err != nil { return err } } @@ -377,7 +379,7 @@ func (h *cleanHandler) 
handleRelatedGalleries(ctx context.Context, fileID file.I return nil } -func (h *cleanHandler) deleteRelatedFolderGalleries(ctx context.Context, folderID file.FolderID) error { +func (h *cleanHandler) deleteRelatedFolderGalleries(ctx context.Context, folderID models.FolderID) error { mgr := GetInstance() qb := mgr.Database.Gallery galleries, err := qb.FindByFolderID(ctx, folderID) @@ -401,7 +403,7 @@ func (h *cleanHandler) deleteRelatedFolderGalleries(ctx context.Context, folderI return nil } -func (h *cleanHandler) handleRelatedImages(ctx context.Context, fileDeleter *file.Deleter, fileID file.ID) error { +func (h *cleanHandler) handleRelatedImages(ctx context.Context, fileDeleter *file.Deleter, fileID models.FileID) error { mgr := GetInstance() imageQB := mgr.Database.Image images, err := imageQB.FindByFileID(ctx, fileID) @@ -431,7 +433,7 @@ func (h *cleanHandler) handleRelatedImages(ctx context.Context, fileDeleter *fil }, nil) } else { // set the primary file to a remaining file - var newPrimaryID file.ID + var newPrimaryID models.FileID for _, f := range i.Files.List() { if f.Base().ID != fileID { newPrimaryID = f.Base().ID @@ -439,9 +441,10 @@ func (h *cleanHandler) handleRelatedImages(ctx context.Context, fileDeleter *fil } } - if _, err := mgr.Repository.Image.UpdatePartial(ctx, i.ID, models.ImagePartial{ - PrimaryFileID: &newPrimaryID, - }); err != nil { + imagePartial := models.NewImagePartial() + imagePartial.PrimaryFileID = &newPrimaryID + + if _, err := mgr.Repository.Image.UpdatePartial(ctx, i.ID, imagePartial); err != nil { return err } } diff --git a/internal/manager/task_export.go b/internal/manager/task_export.go index f186d3eb48d..a7278253ecc 100644 --- a/internal/manager/task_export.go +++ b/internal/manager/task_export.go @@ -13,7 +13,6 @@ import ( "time" "github.com/stashapp/stash/internal/manager/config" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/gallery" 
"github.com/stashapp/stash/pkg/image" @@ -386,7 +385,7 @@ func (t *ExportTask) ExportScenes(ctx context.Context, workers int, repo Reposit logger.Infof("[scenes] export complete in %s. %d workers used.", time.Since(startTime), workers) } -func exportFile(f file.File, t *ExportTask) { +func exportFile(f models.File, t *ExportTask) { newFileJSON := fileToJSON(f) fn := newFileJSON.Filename() @@ -396,7 +395,7 @@ func exportFile(f file.File, t *ExportTask) { } } -func fileToJSON(f file.File) jsonschema.DirEntry { +func fileToJSON(f models.File) jsonschema.DirEntry { bf := f.Base() base := jsonschema.BaseFile{ @@ -422,7 +421,7 @@ func fileToJSON(f file.File) jsonschema.DirEntry { } switch ff := f.(type) { - case *file.VideoFile: + case *models.VideoFile: base.Type = jsonschema.DirEntryTypeVideo return jsonschema.VideoFile{ BaseFile: &base, @@ -437,7 +436,7 @@ func fileToJSON(f file.File) jsonschema.DirEntry { Interactive: ff.Interactive, InteractiveSpeed: ff.InteractiveSpeed, } - case *file.ImageFile: + case *models.ImageFile: base.Type = jsonschema.DirEntryTypeImage return jsonschema.ImageFile{ BaseFile: &base, @@ -450,7 +449,7 @@ func fileToJSON(f file.File) jsonschema.DirEntry { return &base } -func exportFolder(f file.Folder, t *ExportTask) { +func exportFolder(f models.Folder, t *ExportTask) { newFileJSON := folderToJSON(f) fn := newFileJSON.Filename() @@ -460,7 +459,7 @@ func exportFolder(f file.Folder, t *ExportTask) { } } -func folderToJSON(f file.Folder) jsonschema.DirEntry { +func folderToJSON(f models.Folder) jsonschema.DirEntry { base := jsonschema.BaseDirEntry{ Type: jsonschema.DirEntryTypeFolder, ModTime: json.JSONTime{Time: f.ModTime}, @@ -648,6 +647,11 @@ func exportImage(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models continue } + if err := s.LoadURLs(ctx, repo.Image); err != nil { + logger.Errorf("[images] <%s> error getting image urls: %s", imageHash, err.Error()) + continue + } + newImageJSON := image.ToBasicJSON(s) // export files 
diff --git a/internal/manager/task_generate_clip_preview.go b/internal/manager/task_generate_clip_preview.go index c0ecfeedfdb..e8f98cd17d4 100644 --- a/internal/manager/task_generate_clip_preview.go +++ b/internal/manager/task_generate_clip_preview.go @@ -4,7 +4,6 @@ import ( "context" "fmt" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/logger" @@ -44,7 +43,7 @@ func (t *GenerateClipPreviewTask) Start(ctx context.Context) { } func (t *GenerateClipPreviewTask) required() bool { - _, ok := t.Image.Files.Primary().(*file.VideoFile) + _, ok := t.Image.Files.Primary().(*models.VideoFile) if !ok { return false } diff --git a/internal/manager/task_generate_markers.go b/internal/manager/task_generate_markers.go index 5d709874f39..fa5ac902255 100644 --- a/internal/manager/task_generate_markers.go +++ b/internal/manager/task_generate_markers.go @@ -5,7 +5,6 @@ import ( "fmt" "path/filepath" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" @@ -102,7 +101,7 @@ func (t *GenerateMarkersTask) generateSceneMarkers(ctx context.Context) { } } -func (t *GenerateMarkersTask) generateMarker(videoFile *file.VideoFile, scene *models.Scene, sceneMarker *models.SceneMarker) { +func (t *GenerateMarkersTask) generateMarker(videoFile *models.VideoFile, scene *models.Scene, sceneMarker *models.SceneMarker) { sceneHash := t.Scene.GetHash(t.fileNamingAlgorithm) seconds := int(sceneMarker.Seconds) diff --git a/internal/manager/task_generate_phash.go b/internal/manager/task_generate_phash.go index 8ae84b02e03..9f3945da34c 100644 --- a/internal/manager/task_generate_phash.go +++ b/internal/manager/task_generate_phash.go @@ -4,7 +4,6 @@ import ( "context" "fmt" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/hash/videophash" "github.com/stashapp/stash/pkg/logger" 
"github.com/stashapp/stash/pkg/models" @@ -12,11 +11,11 @@ import ( ) type GeneratePhashTask struct { - File *file.VideoFile + File *models.VideoFile Overwrite bool fileNamingAlgorithm models.HashAlgorithm txnManager txn.Manager - fileUpdater file.Updater + fileUpdater models.FileUpdater } func (t *GeneratePhashTask) GetDescription() string { @@ -38,8 +37,8 @@ func (t *GeneratePhashTask) Start(ctx context.Context) { if err := txn.WithTxn(ctx, t.txnManager, func(ctx context.Context) error { qb := t.fileUpdater hashValue := int64(*hash) - t.File.Fingerprints = t.File.Fingerprints.AppendUnique(file.Fingerprint{ - Type: file.FingerprintTypePhash, + t.File.Fingerprints = t.File.Fingerprints.AppendUnique(models.Fingerprint{ + Type: models.FingerprintTypePhash, Fingerprint: hashValue, }) @@ -54,5 +53,5 @@ func (t *GeneratePhashTask) required() bool { return true } - return t.File.Fingerprints.Get(file.FingerprintTypePhash) == nil + return t.File.Fingerprints.Get(models.FingerprintTypePhash) == nil } diff --git a/internal/manager/task_generate_screenshot.go b/internal/manager/task_generate_screenshot.go index 384d8740c7b..1050ebd1c05 100644 --- a/internal/manager/task_generate_screenshot.go +++ b/internal/manager/task_generate_screenshot.go @@ -72,7 +72,7 @@ func (t *GenerateCoverTask) Start(ctx context.Context) { if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error { qb := t.txnManager.Scene - updatedScene := models.NewScenePartial() + scenePartial := models.NewScenePartial() // update the scene cover table if err := qb.UpdateCover(ctx, t.Scene.ID, coverImageData); err != nil { @@ -80,7 +80,7 @@ func (t *GenerateCoverTask) Start(ctx context.Context) { } // update the scene with the update date - _, err = qb.UpdatePartial(ctx, t.Scene.ID, updatedScene) + _, err = qb.UpdatePartial(ctx, t.Scene.ID, scenePartial) if err != nil { return fmt.Errorf("error updating scene: %v", err) } diff --git a/internal/manager/task_identify.go 
b/internal/manager/task_identify.go index f7ee5784cbd..0022a69ca31 100644 --- a/internal/manager/task_identify.go +++ b/internal/manager/task_identify.go @@ -136,7 +136,7 @@ func (j *IdentifyJob) identifyScene(ctx context.Context, s *models.Scene, source SceneReaderUpdater: instance.Repository.Scene, StudioReaderWriter: instance.Repository.Studio, PerformerCreator: instance.Repository.Performer, - TagCreatorFinder: instance.Repository.Tag, + TagFinderCreator: instance.Repository.Tag, DefaultOptions: j.input.Options, Sources: sources, diff --git a/internal/manager/task_import.go b/internal/manager/task_import.go index aa0e7ec6358..c0f97e254ae 100644 --- a/internal/manager/task_import.go +++ b/internal/manager/task_import.go @@ -10,6 +10,7 @@ import ( "path/filepath" "github.com/99designs/gqlgen/graphql" + "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/gallery" "github.com/stashapp/stash/pkg/image" @@ -281,7 +282,7 @@ func (t *ImportTask) ImportStudios(ctx context.Context) { logger.Info("[studios] import complete") } -func (t *ImportTask) ImportStudio(ctx context.Context, studioJSON *jsonschema.Studio, pendingParent map[string][]*jsonschema.Studio, readerWriter studio.NameFinderCreatorUpdater) error { +func (t *ImportTask) ImportStudio(ctx context.Context, studioJSON *jsonschema.Studio, pendingParent map[string][]*jsonschema.Studio, readerWriter studio.ImporterReaderWriter) error { importer := &studio.Importer{ ReaderWriter: readerWriter, Input: *studioJSON, @@ -385,7 +386,7 @@ func (t *ImportTask) ImportFiles(ctx context.Context) { if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error { return t.ImportFile(ctx, fileJSON, pendingParent) }); err != nil { - if errors.Is(err, errZipFileNotExist) { + if errors.Is(err, file.ErrZipFileNotExist) { // add to the pending parent list so that it is created after the parent s := pendingParent[fileJSON.DirEntry().ZipFile] s = append(s, fileJSON) @@ -421,7 
+422,7 @@ func (t *ImportTask) ImportFile(ctx context.Context, fileJSON jsonschema.DirEntr r := t.txnManager readerWriter := r.File - fileImporter := &fileFolderImporter{ + fileImporter := &file.Importer{ ReaderWriter: readerWriter, FolderStore: r.Folder, Input: fileJSON, @@ -569,7 +570,7 @@ func (t *ImportTask) ImportTags(ctx context.Context) { logger.Info("[tags] import complete") } -func (t *ImportTask) ImportTag(ctx context.Context, tagJSON *jsonschema.Tag, pendingParent map[string][]*jsonschema.Tag, fail bool, readerWriter tag.NameFinderCreatorUpdater) error { +func (t *ImportTask) ImportTag(ctx context.Context, tagJSON *jsonschema.Tag, pendingParent map[string][]*jsonschema.Tag, fail bool, readerWriter tag.ImporterReaderWriter) error { importer := &tag.Importer{ ReaderWriter: readerWriter, Input: *tagJSON, diff --git a/internal/manager/task_scan.go b/internal/manager/task_scan.go index 7c5e2015641..f1f3e39272f 100644 --- a/internal/manager/task_scan.go +++ b/internal/manager/task_scan.go @@ -96,17 +96,17 @@ func newExtensionConfig(c *config.Instance) extensionConfig { } type fileCounter interface { - CountByFileID(ctx context.Context, fileID file.ID) (int, error) + CountByFileID(ctx context.Context, fileID models.FileID) (int, error) } type galleryFinder interface { fileCounter - FindByFolderID(ctx context.Context, folderID file.FolderID) ([]*models.Gallery, error) + FindByFolderID(ctx context.Context, folderID models.FolderID) ([]*models.Gallery, error) } type sceneFinder interface { fileCounter - FindByPrimaryFileID(ctx context.Context, fileID file.ID) ([]*models.Scene, error) + FindByPrimaryFileID(ctx context.Context, fileID models.FileID) ([]*models.Scene, error) } // handlerRequiredFilter returns true if a File's handler needs to be executed despite the file not being updated. 
@@ -139,7 +139,7 @@ func newHandlerRequiredFilter(c *config.Instance) *handlerRequiredFilter { } } -func (f *handlerRequiredFilter) Accept(ctx context.Context, ff file.File) bool { +func (f *handlerRequiredFilter) Accept(ctx context.Context, ff models.File) bool { path := ff.Base().Path isVideoFile := useAsVideo(path) isImageFile := useAsImage(path) @@ -213,7 +213,7 @@ func (f *handlerRequiredFilter) Accept(ctx context.Context, ff file.File) bool { // clean captions - scene handler handles this as well, but // unchanged files aren't processed by the scene handler - videoFile, _ := ff.(*file.VideoFile) + videoFile, _ := ff.(*models.VideoFile) if videoFile != nil { if err := video.CleanCaptions(ctx, videoFile, f.txnManager, f.CaptionUpdater); err != nil { logger.Errorf("Error cleaning captions: %v", err) @@ -370,7 +370,7 @@ type imageGenerators struct { progress *job.Progress } -func (g *imageGenerators) Generate(ctx context.Context, i *models.Image, f file.File) error { +func (g *imageGenerators) Generate(ctx context.Context, i *models.Image, f models.File) error { const overwrite = false progress := g.progress @@ -387,12 +387,12 @@ func (g *imageGenerators) Generate(ctx context.Context, i *models.Image, f file. } // avoid adding a task if the file isn't a video file - _, isVideo := f.(*file.VideoFile) + _, isVideo := f.(*models.VideoFile) if isVideo && t.ScanGenerateClipPreviews { // this is a bit of a hack: the task requires files to be loaded, but // we don't really need to since we already have the file ii := *i - ii.Files = models.NewRelatedFiles([]file.File{f}) + ii.Files = models.NewRelatedFiles([]models.File{f}) progress.AddTotal(1) previewsFn := func(ctx context.Context) { @@ -415,7 +415,7 @@ func (g *imageGenerators) Generate(ctx context.Context, i *models.Image, f file. 
return nil } -func (g *imageGenerators) generateThumbnail(ctx context.Context, i *models.Image, f file.File) error { +func (g *imageGenerators) generateThumbnail(ctx context.Context, i *models.Image, f models.File) error { thumbPath := GetInstance().Paths.Generated.GetThumbnailPath(i.Checksum, models.DefaultGthumbWidth) exists, _ := fsutil.FileExists(thumbPath) if exists { @@ -424,12 +424,12 @@ func (g *imageGenerators) generateThumbnail(ctx context.Context, i *models.Image path := f.Base().Path - asFrame, ok := f.(file.VisualFile) + vf, ok := f.(models.VisualFile) if !ok { - return fmt.Errorf("file %s does not implement Frame", path) + return fmt.Errorf("file %s is not a visual file", path) } - if asFrame.GetHeight() <= models.DefaultGthumbWidth && asFrame.GetWidth() <= models.DefaultGthumbWidth { + if vf.GetHeight() <= models.DefaultGthumbWidth && vf.GetWidth() <= models.DefaultGthumbWidth { return nil } @@ -466,7 +466,7 @@ type sceneGenerators struct { progress *job.Progress } -func (g *sceneGenerators) Generate(ctx context.Context, s *models.Scene, f *file.VideoFile) error { +func (g *sceneGenerators) Generate(ctx context.Context, s *models.Scene, f *models.VideoFile) error { const overwrite = false progress := g.progress diff --git a/internal/manager/task_stash_box_tag.go b/internal/manager/task_stash_box_tag.go index 866c8205cb9..6833f166343 100644 --- a/internal/manager/task_stash_box_tag.go +++ b/internal/manager/task_stash_box_tag.go @@ -138,9 +138,6 @@ func (t *StashBoxBatchTagTask) processMatchedPerformer(ctx context.Context, p *m if t.performer != nil { storedID, _ := strconv.Atoi(*p.StoredID) - existingStashIDs := getStashIDsForPerformer(ctx, storedID) - partial := p.ToPartial(t.box.Endpoint, excluded, existingStashIDs) - image, err := p.GetImage(ctx, excluded) if err != nil { logger.Errorf("Error processing scraped performer image for %s: %v", *p.Name, err) @@ -151,6 +148,13 @@ func (t *StashBoxBatchTagTask) processMatchedPerformer(ctx 
context.Context, p *m err = txn.WithTxn(ctx, instance.Repository, func(ctx context.Context) error { qb := instance.Repository.Performer + existingStashIDs, err := qb.GetStashIDs(ctx, storedID) + if err != nil { + return err + } + + partial := p.ToPartial(t.box.Endpoint, excluded, existingStashIDs) + if _, err := qb.UpdatePartial(ctx, t.performer.ID, partial); err != nil { return err } @@ -199,16 +203,6 @@ func (t *StashBoxBatchTagTask) processMatchedPerformer(ctx context.Context, p *m } } -func getStashIDsForPerformer(ctx context.Context, performerID int) []models.StashID { - tempPerformer := &models.Performer{ID: performerID} - - err := tempPerformer.LoadStashIDs(ctx, instance.Repository.Performer) - if err != nil { - return nil - } - return tempPerformer.StashIDs.List() -} - func (t *StashBoxBatchTagTask) stashBoxStudioTag(ctx context.Context) { studio, err := t.findStashBoxStudio(ctx) if err != nil { @@ -292,9 +286,6 @@ func (t *StashBoxBatchTagTask) processMatchedStudio(ctx context.Context, s *mode } } - existingStashIDs := getStashIDsForStudio(ctx, storedID) - partial := s.ToPartial(s.StoredID, t.box.Endpoint, excluded, existingStashIDs) - image, err := s.GetImage(ctx, excluded) if err != nil { logger.Errorf("Error processing scraped studio image for %s: %v", s.Name, err) @@ -305,6 +296,13 @@ func (t *StashBoxBatchTagTask) processMatchedStudio(ctx context.Context, s *mode err = txn.WithTxn(ctx, instance.Repository, func(ctx context.Context) error { qb := instance.Repository.Studio + existingStashIDs, err := qb.GetStashIDs(ctx, storedID) + if err != nil { + return err + } + + partial := s.ToPartial(s.StoredID, t.box.Endpoint, excluded, existingStashIDs) + if err := studio.ValidateModify(ctx, *partial, qb); err != nil { return err } @@ -400,11 +398,8 @@ func (t *StashBoxBatchTagTask) processParentStudio(ctx context.Context, parent * } return err } else { - storedID, _ := strconv.Atoi(*parent.StoredID) - // The parent studio matched an existing one and the user 
has chosen in the UI to link and/or update it - existingStashIDs := getStashIDsForStudio(ctx, storedID) - partial := parent.ToPartial(parent.StoredID, t.box.Endpoint, excluded, existingStashIDs) + storedID, _ := strconv.Atoi(*parent.StoredID) image, err := parent.GetImage(ctx, excluded) if err != nil { @@ -416,7 +411,14 @@ func (t *StashBoxBatchTagTask) processParentStudio(ctx context.Context, parent * err = txn.WithTxn(ctx, instance.Repository, func(ctx context.Context) error { qb := instance.Repository.Studio - if err := studio.ValidateModify(ctx, *partial, instance.Repository.Studio); err != nil { + existingStashIDs, err := qb.GetStashIDs(ctx, storedID) + if err != nil { + return err + } + + partial := parent.ToPartial(parent.StoredID, t.box.Endpoint, excluded, existingStashIDs) + + if err := studio.ValidateModify(ctx, *partial, qb); err != nil { return err } @@ -440,13 +442,3 @@ func (t *StashBoxBatchTagTask) processParentStudio(ctx context.Context, parent * return err } } - -func getStashIDsForStudio(ctx context.Context, studioID int) []models.StashID { - tempStudio := &models.Studio{ID: studioID} - - err := tempStudio.LoadStashIDs(ctx, instance.Repository.Studio) - if err != nil { - return nil - } - return tempStudio.StashIDs.List() -} diff --git a/pkg/ffmpeg/stream_segmented.go b/pkg/ffmpeg/stream_segmented.go index fa7347582a9..68e6f42822b 100644 --- a/pkg/ffmpeg/stream_segmented.go +++ b/pkg/ffmpeg/stream_segmented.go @@ -16,7 +16,6 @@ import ( "sync/atomic" "time" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" @@ -51,7 +50,7 @@ const ( type StreamType struct { Name string SegmentType *SegmentType - ServeManifest func(sm *StreamManager, w http.ResponseWriter, r *http.Request, vf *file.VideoFile, resolution string) + ServeManifest func(sm *StreamManager, w http.ResponseWriter, r *http.Request, vf *models.VideoFile, resolution string) Args func(codec 
VideoCodec, segment int, videoFilter VideoFilter, videoOnly bool, outputDir string) Args } @@ -250,7 +249,7 @@ var ErrInvalidSegment = errors.New("invalid segment") type StreamOptions struct { StreamType *StreamType - VideoFile *file.VideoFile + VideoFile *models.VideoFile Resolution string Hash string Segment string @@ -279,7 +278,7 @@ type waitingSegment struct { type runningStream struct { dir string streamType *StreamType - vf *file.VideoFile + vf *models.VideoFile maxTranscodeSize int outputDir string @@ -394,7 +393,7 @@ func (tp *transcodeProcess) checkSegments() { } } -func lastSegment(vf *file.VideoFile) int { +func lastSegment(vf *models.VideoFile) int { return int(math.Ceil(vf.Duration/segmentLength)) - 1 } @@ -405,7 +404,7 @@ func segmentExists(path string) bool { // serveHLSManifest serves a generated HLS playlist. The URLs for the segments // are of the form {r.URL}/%d.ts{?urlQuery} where %d is the segment index. -func serveHLSManifest(sm *StreamManager, w http.ResponseWriter, r *http.Request, vf *file.VideoFile, resolution string) { +func serveHLSManifest(sm *StreamManager, w http.ResponseWriter, r *http.Request, vf *models.VideoFile, resolution string) { if sm.cacheDir == "" { logger.Error("[transcode] cannot live transcode with HLS because cache dir is unset") http.Error(w, "cannot live transcode with HLS because cache dir is unset", http.StatusServiceUnavailable) @@ -460,7 +459,7 @@ func serveHLSManifest(sm *StreamManager, w http.ResponseWriter, r *http.Request, } // serveDASHManifest serves a generated DASH manifest. 
-func serveDASHManifest(sm *StreamManager, w http.ResponseWriter, r *http.Request, vf *file.VideoFile, resolution string) { +func serveDASHManifest(sm *StreamManager, w http.ResponseWriter, r *http.Request, vf *models.VideoFile, resolution string) { if sm.cacheDir == "" { logger.Error("[transcode] cannot live transcode with DASH because cache dir is unset") http.Error(w, "cannot live transcode files with DASH because cache dir is unset", http.StatusServiceUnavailable) @@ -550,7 +549,7 @@ func serveDASHManifest(sm *StreamManager, w http.ResponseWriter, r *http.Request utils.ServeStaticContent(w, r, buf.Bytes()) } -func (sm *StreamManager) ServeManifest(w http.ResponseWriter, r *http.Request, streamType *StreamType, vf *file.VideoFile, resolution string) { +func (sm *StreamManager) ServeManifest(w http.ResponseWriter, r *http.Request, streamType *StreamType, vf *models.VideoFile, resolution string) { streamType.ServeManifest(sm, w, r, vf, resolution) } diff --git a/pkg/ffmpeg/stream_transcode.go b/pkg/ffmpeg/stream_transcode.go index cd123183ffb..8c19af3a606 100644 --- a/pkg/ffmpeg/stream_transcode.go +++ b/pkg/ffmpeg/stream_transcode.go @@ -8,7 +8,6 @@ import ( "strings" "syscall" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" @@ -134,7 +133,7 @@ var ( type TranscodeOptions struct { StreamType StreamFormat - VideoFile *file.VideoFile + VideoFile *models.VideoFile Resolution string StartTime float64 } @@ -267,7 +266,7 @@ func (sm *StreamManager) getTranscodeStream(ctx *fsutil.LockContext, options Tra // process killing should be handled by command context _, err := io.Copy(w, stdout) - if err != nil && !errors.Is(err, syscall.EPIPE) { + if err != nil && !errors.Is(err, syscall.EPIPE) && !errors.Is(err, syscall.ECONNRESET) { logger.Errorf("[transcode] error serving transcoded video file: %v", err) } diff --git a/pkg/file/clean.go b/pkg/file/clean.go index 
44470c5a093..d3e27a774a2 100644 --- a/pkg/file/clean.go +++ b/pkg/file/clean.go @@ -10,12 +10,13 @@ import ( "github.com/stashapp/stash/pkg/job" "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/txn" ) // Cleaner scans through stored file and folder instances and removes those that are no longer present on disk. type Cleaner struct { - FS FS + FS models.FS Repository Repository Handlers []CleanHandler @@ -55,44 +56,44 @@ func (s *Cleaner) Clean(ctx context.Context, options CleanOptions, progress *job } type fileOrFolder struct { - fileID ID - folderID FolderID + fileID models.FileID + folderID models.FolderID } type deleteSet struct { orderedList []fileOrFolder - fileIDSet map[ID]string + fileIDSet map[models.FileID]string - folderIDSet map[FolderID]string + folderIDSet map[models.FolderID]string } func newDeleteSet() deleteSet { return deleteSet{ - fileIDSet: make(map[ID]string), - folderIDSet: make(map[FolderID]string), + fileIDSet: make(map[models.FileID]string), + folderIDSet: make(map[models.FolderID]string), } } -func (s *deleteSet) add(id ID, path string) { +func (s *deleteSet) add(id models.FileID, path string) { if _, ok := s.fileIDSet[id]; !ok { s.orderedList = append(s.orderedList, fileOrFolder{fileID: id}) s.fileIDSet[id] = path } } -func (s *deleteSet) has(id ID) bool { +func (s *deleteSet) has(id models.FileID) bool { _, ok := s.fileIDSet[id] return ok } -func (s *deleteSet) addFolder(id FolderID, path string) { +func (s *deleteSet) addFolder(id models.FolderID, path string) { if _, ok := s.folderIDSet[id]; !ok { s.orderedList = append(s.orderedList, fileOrFolder{folderID: id}) s.folderIDSet[id] = path } } -func (s *deleteSet) hasFolder(id FolderID) bool { +func (s *deleteSet) hasFolder(id models.FolderID) bool { _, ok := s.folderIDSet[id] return ok } @@ -113,7 +114,7 @@ func (j *cleanJob) execute(ctx context.Context) error { if err := txn.WithReadTxn(ctx, j.Repository, func(ctx 
context.Context) error { var err error - fileCount, err = j.Repository.CountAllInPaths(ctx, j.options.Paths) + fileCount, err = j.Repository.FileStore.CountAllInPaths(ctx, j.options.Paths) if err != nil { return err } @@ -177,7 +178,7 @@ func (j *cleanJob) assessFiles(ctx context.Context, toDelete *deleteSet) error { return nil } - files, err := j.Repository.FindAllInPaths(ctx, j.options.Paths, batchSize, offset) + files, err := j.Repository.FileStore.FindAllInPaths(ctx, j.options.Paths, batchSize, offset) if err != nil { return fmt.Errorf("error querying for files: %w", err) } @@ -221,9 +222,9 @@ func (j *cleanJob) assessFiles(ctx context.Context, toDelete *deleteSet) error { } // flagFolderForDelete adds folders to the toDelete set, with the leaf folders added first -func (j *cleanJob) flagFileForDelete(ctx context.Context, toDelete *deleteSet, f File) error { +func (j *cleanJob) flagFileForDelete(ctx context.Context, toDelete *deleteSet, f models.File) error { // add contained files first - containedFiles, err := j.Repository.FindByZipFileID(ctx, f.Base().ID) + containedFiles, err := j.Repository.FileStore.FindByZipFileID(ctx, f.Base().ID) if err != nil { return fmt.Errorf("error finding contained files for %q: %w", f.Base().Path, err) } @@ -306,7 +307,7 @@ func (j *cleanJob) assessFolders(ctx context.Context, toDelete *deleteSet) error return nil } -func (j *cleanJob) flagFolderForDelete(ctx context.Context, toDelete *deleteSet, folder *Folder) error { +func (j *cleanJob) flagFolderForDelete(ctx context.Context, toDelete *deleteSet, folder *models.Folder) error { // it is possible that child folders may be included while parent folders are not // so we need to check child folders separately toDelete.addFolder(folder.ID, folder.Path) @@ -314,7 +315,7 @@ func (j *cleanJob) flagFolderForDelete(ctx context.Context, toDelete *deleteSet, return nil } -func (j *cleanJob) shouldClean(ctx context.Context, f File) bool { +func (j *cleanJob) shouldClean(ctx 
context.Context, f models.File) bool { path := f.Base().Path info, err := f.Base().Info(j.FS) @@ -336,7 +337,7 @@ func (j *cleanJob) shouldClean(ctx context.Context, f File) bool { return !filter.Accept(ctx, path, info) } -func (j *cleanJob) shouldCleanFolder(ctx context.Context, f *Folder) bool { +func (j *cleanJob) shouldCleanFolder(ctx context.Context, f *models.Folder) bool { path := f.Path info, err := f.Info(j.FS) @@ -376,7 +377,7 @@ func (j *cleanJob) shouldCleanFolder(ctx context.Context, f *Folder) bool { return !filter.Accept(ctx, path, info) } -func (j *cleanJob) deleteFile(ctx context.Context, fileID ID, fn string) { +func (j *cleanJob) deleteFile(ctx context.Context, fileID models.FileID, fn string) { // delete associated objects fileDeleter := NewDeleter() if err := txn.WithTxn(ctx, j.Repository, func(ctx context.Context) error { @@ -386,14 +387,14 @@ func (j *cleanJob) deleteFile(ctx context.Context, fileID ID, fn string) { return err } - return j.Repository.Destroy(ctx, fileID) + return j.Repository.FileStore.Destroy(ctx, fileID) }); err != nil { logger.Errorf("Error deleting file %q from database: %s", fn, err.Error()) return } } -func (j *cleanJob) deleteFolder(ctx context.Context, folderID FolderID, fn string) { +func (j *cleanJob) deleteFolder(ctx context.Context, folderID models.FolderID, fn string) { // delete associated objects fileDeleter := NewDeleter() if err := txn.WithTxn(ctx, j.Repository, func(ctx context.Context) error { @@ -410,7 +411,7 @@ func (j *cleanJob) deleteFolder(ctx context.Context, folderID FolderID, fn strin } } -func (j *cleanJob) fireHandlers(ctx context.Context, fileDeleter *Deleter, fileID ID) error { +func (j *cleanJob) fireHandlers(ctx context.Context, fileDeleter *Deleter, fileID models.FileID) error { for _, h := range j.Handlers { if err := h.HandleFile(ctx, fileDeleter, fileID); err != nil { return err @@ -420,7 +421,7 @@ func (j *cleanJob) fireHandlers(ctx context.Context, fileDeleter *Deleter, fileI return nil 
} -func (j *cleanJob) fireFolderHandlers(ctx context.Context, fileDeleter *Deleter, folderID FolderID) error { +func (j *cleanJob) fireFolderHandlers(ctx context.Context, fileDeleter *Deleter, folderID models.FolderID) error { for _, h := range j.Handlers { if err := h.HandleFolder(ctx, fileDeleter, folderID); err != nil { return err diff --git a/pkg/file/delete.go b/pkg/file/delete.go index 9ee27c1767d..88eb5169eac 100644 --- a/pkg/file/delete.go +++ b/pkg/file/delete.go @@ -9,6 +9,7 @@ import ( "github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/txn" ) @@ -179,7 +180,7 @@ func (d *Deleter) renameForRestore(path string) error { return d.RenamerRemover.Rename(path+deleteFileSuffix, path) } -func Destroy(ctx context.Context, destroyer Destroyer, f File, fileDeleter *Deleter, deleteFile bool) error { +func Destroy(ctx context.Context, destroyer models.FileDestroyer, f models.File, fileDeleter *Deleter, deleteFile bool) error { if err := destroyer.Destroy(ctx, f.Base().ID); err != nil { return err } @@ -195,11 +196,11 @@ func Destroy(ctx context.Context, destroyer Destroyer, f File, fileDeleter *Dele } type ZipDestroyer struct { - FileDestroyer GetterDestroyer - FolderDestroyer FolderGetterDestroyer + FileDestroyer models.FileFinderDestroyer + FolderDestroyer models.FolderFinderDestroyer } -func (d *ZipDestroyer) DestroyZip(ctx context.Context, f File, fileDeleter *Deleter, deleteFile bool) error { +func (d *ZipDestroyer) DestroyZip(ctx context.Context, f models.File, fileDeleter *Deleter, deleteFile bool) error { // destroy contained files files, err := d.FileDestroyer.FindByZipFileID(ctx, f.Base().ID) if err != nil { diff --git a/pkg/file/file.go b/pkg/file/file.go index 50a2d613868..179e1e01af7 100644 --- a/pkg/file/file.go +++ b/pkg/file/file.go @@ -1,225 +1,15 @@ package file import ( - "bytes" - "context" - "io" - "io/fs" - "net/http" - "strconv" - "time" + 
"github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/txn" ) -// ID represents an ID of a file. -type ID int32 +// Repository provides access to storage methods for files and folders. +type Repository struct { + txn.Manager + txn.DatabaseProvider -func (i ID) String() string { - return strconv.Itoa(int(i)) -} - -// DirEntry represents a file or directory in the file system. -type DirEntry struct { - ZipFileID *ID `json:"zip_file_id"` - - // transient - not persisted - // only guaranteed to have id, path and basename set - ZipFile File - - ModTime time.Time `json:"mod_time"` -} - -func (e *DirEntry) info(fs FS, path string) (fs.FileInfo, error) { - if e.ZipFile != nil { - zipPath := e.ZipFile.Base().Path - zfs, err := fs.OpenZip(zipPath) - if err != nil { - return nil, err - } - defer zfs.Close() - fs = zfs - } - // else assume os file - - ret, err := fs.Lstat(path) - return ret, err -} - -// File represents a file in the file system. -type File interface { - Base() *BaseFile - SetFingerprints(fp Fingerprints) - Open(fs FS) (io.ReadCloser, error) -} - -// BaseFile represents a file in the file system. -type BaseFile struct { - ID ID `json:"id"` - - DirEntry - - // resolved from parent folder and basename only - not stored in DB - Path string `json:"path"` - - Basename string `json:"basename"` - ParentFolderID FolderID `json:"parent_folder_id"` - - Fingerprints Fingerprints `json:"fingerprints"` - - Size int64 `json:"size"` - - CreatedAt time.Time `json:"created_at"` - UpdatedAt time.Time `json:"updated_at"` -} - -// SetFingerprints sets the fingerprints of the file. -// If a fingerprint of the same type already exists, it is overwritten. -func (f *BaseFile) SetFingerprints(fp Fingerprints) { - for _, v := range fp { - f.SetFingerprint(v) - } -} - -// SetFingerprint sets the fingerprint of the file. -// If a fingerprint of the same type already exists, it is overwritten. 
-func (f *BaseFile) SetFingerprint(fp Fingerprint) { - for i, existing := range f.Fingerprints { - if existing.Type == fp.Type { - f.Fingerprints[i] = fp - return - } - } - - f.Fingerprints = append(f.Fingerprints, fp) -} - -// Base is used to fulfil the File interface. -func (f *BaseFile) Base() *BaseFile { - return f -} - -func (f *BaseFile) Open(fs FS) (io.ReadCloser, error) { - if f.ZipFile != nil { - zipPath := f.ZipFile.Base().Path - zfs, err := fs.OpenZip(zipPath) - if err != nil { - return nil, err - } - - return zfs.OpenOnly(f.Path) - } - - return fs.Open(f.Path) -} - -func (f *BaseFile) Info(fs FS) (fs.FileInfo, error) { - return f.info(fs, f.Path) -} - -func (f *BaseFile) Serve(fs FS, w http.ResponseWriter, r *http.Request) error { - reader, err := f.Open(fs) - if err != nil { - return err - } - - defer reader.Close() - - content, ok := reader.(io.ReadSeeker) - if !ok { - data, err := io.ReadAll(reader) - if err != nil { - return err - } - content = bytes.NewReader(data) - } - - if r.URL.Query().Has("t") { - w.Header().Set("Cache-Control", "private, max-age=31536000, immutable") - } else { - w.Header().Set("Cache-Control", "no-cache") - } - http.ServeContent(w, r, f.Basename, f.ModTime, content) - - return nil -} - -type Finder interface { - Find(ctx context.Context, id ...ID) ([]File, error) -} - -// Getter provides methods to find Files. 
-type Getter interface { - Finder - FindByPath(ctx context.Context, path string) (File, error) - FindAllByPath(ctx context.Context, path string) ([]File, error) - FindByFingerprint(ctx context.Context, fp Fingerprint) ([]File, error) - FindByZipFileID(ctx context.Context, zipFileID ID) ([]File, error) - FindAllInPaths(ctx context.Context, p []string, limit, offset int) ([]File, error) - FindByFileInfo(ctx context.Context, info fs.FileInfo, size int64) ([]File, error) -} - -type Counter interface { - CountAllInPaths(ctx context.Context, p []string) (int, error) - CountByFolderID(ctx context.Context, folderID FolderID) (int, error) -} - -// Creator provides methods to create Files. -type Creator interface { - Create(ctx context.Context, f File) error -} - -// Updater provides methods to update Files. -type Updater interface { - Update(ctx context.Context, f File) error -} - -type Destroyer interface { - Destroy(ctx context.Context, id ID) error -} - -type GetterUpdater interface { - Getter - Updater -} - -type GetterDestroyer interface { - Getter - Destroyer -} - -// Store provides methods to find, create and update Files. -type Store interface { - Getter - Counter - Creator - Updater - Destroyer - - IsPrimary(ctx context.Context, fileID ID) (bool, error) -} - -// Decorator wraps the Decorate method to add additional functionality while scanning files. -type Decorator interface { - Decorate(ctx context.Context, fs FS, f File) (File, error) - IsMissingMetadata(ctx context.Context, fs FS, f File) bool -} - -type FilteredDecorator struct { - Decorator - Filter -} - -// Decorate runs the decorator if the filter accepts the file. 
-func (d *FilteredDecorator) Decorate(ctx context.Context, fs FS, f File) (File, error) { - if d.Accept(ctx, f) { - return d.Decorator.Decorate(ctx, fs, f) - } - return f, nil -} - -func (d *FilteredDecorator) IsMissingMetadata(ctx context.Context, fs FS, f File) bool { - if d.Accept(ctx, f) { - return d.Decorator.IsMissingMetadata(ctx, fs, f) - } - - return false + FileStore models.FileReaderWriter + FolderStore models.FolderReaderWriter } diff --git a/pkg/file/folder.go b/pkg/file/folder.go index 5ffd7f2b557..02087dd4117 100644 --- a/pkg/file/folder.go +++ b/pkg/file/folder.go @@ -3,94 +3,16 @@ package file import ( "context" "fmt" - "io/fs" "path/filepath" - "strconv" "strings" "time" -) - -// FolderID represents an ID of a folder. -type FolderID int32 - -// String converts the ID to a string. -func (i FolderID) String() string { - return strconv.Itoa(int(i)) -} - -// Folder represents a folder in the file system. -type Folder struct { - ID FolderID `json:"id"` - DirEntry - Path string `json:"path"` - ParentFolderID *FolderID `json:"parent_folder_id"` - - CreatedAt time.Time `json:"created_at"` - UpdatedAt time.Time `json:"updated_at"` -} - -func (f *Folder) Info(fs FS) (fs.FileInfo, error) { - return f.info(fs, f.Path) -} - -type FolderFinder interface { - Find(ctx context.Context, id FolderID) (*Folder, error) -} - -// FolderPathFinder finds Folders by their path. -type FolderPathFinder interface { - FindByPath(ctx context.Context, path string) (*Folder, error) -} - -// FolderGetter provides methods to find Folders. 
-type FolderGetter interface { - FolderFinder - FolderPathFinder - FindByZipFileID(ctx context.Context, zipFileID ID) ([]*Folder, error) - FindAllInPaths(ctx context.Context, p []string, limit, offset int) ([]*Folder, error) - FindByParentFolderID(ctx context.Context, parentFolderID FolderID) ([]*Folder, error) -} -type FolderCounter interface { - CountAllInPaths(ctx context.Context, p []string) (int, error) -} - -// FolderCreator provides methods to create Folders. -type FolderCreator interface { - Create(ctx context.Context, f *Folder) error -} - -type FolderFinderCreator interface { - FolderPathFinder - FolderCreator -} - -// FolderUpdater provides methods to update Folders. -type FolderUpdater interface { - Update(ctx context.Context, f *Folder) error -} - -type FolderDestroyer interface { - Destroy(ctx context.Context, id FolderID) error -} - -type FolderGetterDestroyer interface { - FolderGetter - FolderDestroyer -} - -// FolderStore provides methods to find, create and update Folders. -type FolderStore interface { - FolderGetter - FolderCounter - FolderCreator - FolderUpdater - FolderDestroyer -} + "github.com/stashapp/stash/pkg/models" +) // GetOrCreateFolderHierarchy gets the folder for the given path, or creates a folder hierarchy for the given path if one if no existing folder is found. 
// Does not create any folders in the file system -func GetOrCreateFolderHierarchy(ctx context.Context, fc FolderFinderCreator, path string) (*Folder, error) { +func GetOrCreateFolderHierarchy(ctx context.Context, fc models.FolderFinderCreator, path string) (*models.Folder, error) { // get or create folder hierarchy folder, err := fc.FindByPath(ctx, path) if err != nil { @@ -106,10 +28,10 @@ func GetOrCreateFolderHierarchy(ctx context.Context, fc FolderFinderCreator, pat now := time.Now() - folder = &Folder{ + folder = &models.Folder{ Path: path, ParentFolderID: &parent.ID, - DirEntry: DirEntry{ + DirEntry: models.DirEntry{ // leave mod time empty for now - it will be updated when the folder is scanned }, CreatedAt: now, @@ -126,7 +48,7 @@ func GetOrCreateFolderHierarchy(ctx context.Context, fc FolderFinderCreator, pat // TransferZipFolderHierarchy creates the folder hierarchy for zipFileID under newPath, and removes // ZipFileID from folders under oldPath. -func TransferZipFolderHierarchy(ctx context.Context, folderStore FolderStore, zipFileID ID, oldPath string, newPath string) error { +func TransferZipFolderHierarchy(ctx context.Context, folderStore models.FolderReaderWriter, zipFileID models.FileID, oldPath string, newPath string) error { zipFolders, err := folderStore.FindByZipFileID(ctx, zipFileID) if err != nil { return err diff --git a/pkg/file/folder_rename_detect.go b/pkg/file/folder_rename_detect.go index 0e52eb7854c..0b57d9c087a 100644 --- a/pkg/file/folder_rename_detect.go +++ b/pkg/file/folder_rename_detect.go @@ -7,27 +7,28 @@ import ( "io/fs" "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" ) type folderRenameCandidate struct { - folder *Folder + folder *models.Folder found int files int } type folderRenameDetector struct { // candidates is a map of folder id to the number of files that match - candidates map[FolderID]folderRenameCandidate + candidates map[models.FolderID]folderRenameCandidate // rejects is a set of 
folder ids which were found to still exist - rejects map[FolderID]struct{} + rejects map[models.FolderID]struct{} } -func (d *folderRenameDetector) isReject(id FolderID) bool { +func (d *folderRenameDetector) isReject(id models.FolderID) bool { _, ok := d.rejects[id] return ok } -func (d *folderRenameDetector) getCandidate(id FolderID) *folderRenameCandidate { +func (d *folderRenameDetector) getCandidate(id models.FolderID) *folderRenameCandidate { c, ok := d.candidates[id] if !ok { return nil @@ -40,14 +41,14 @@ func (d *folderRenameDetector) setCandidate(c folderRenameCandidate) { d.candidates[c.folder.ID] = c } -func (d *folderRenameDetector) reject(id FolderID) { +func (d *folderRenameDetector) reject(id models.FolderID) { d.rejects[id] = struct{}{} } // bestCandidate returns the folder that is the best candidate for a rename. // This is the folder that has the largest number of its original files that // are still present in the new location. -func (d *folderRenameDetector) bestCandidate() *Folder { +func (d *folderRenameDetector) bestCandidate() *models.Folder { if len(d.candidates) == 0 { return nil } @@ -74,14 +75,14 @@ func (d *folderRenameDetector) bestCandidate() *Folder { return best.folder } -func (s *scanJob) detectFolderMove(ctx context.Context, file scanFile) (*Folder, error) { +func (s *scanJob) detectFolderMove(ctx context.Context, file scanFile) (*models.Folder, error) { // in order for a folder to be considered moved, the existing folder must be // missing, and the majority of the old folder's files must be present, unchanged, // in the new folder. 
detector := folderRenameDetector{ - candidates: make(map[FolderID]folderRenameCandidate), - rejects: make(map[FolderID]struct{}), + candidates: make(map[models.FolderID]folderRenameCandidate), + rejects: make(map[models.FolderID]struct{}), } // rejects is a set of folder ids which were found to still exist @@ -117,7 +118,7 @@ func (s *scanJob) detectFolderMove(ctx context.Context, file scanFile) (*Folder, } // check if the file exists in the database based on basename, size and mod time - existing, err := s.Repository.Store.FindByFileInfo(ctx, info, size) + existing, err := s.Repository.FileStore.FindByFileInfo(ctx, info, size) if err != nil { return fmt.Errorf("checking for existing file %q: %w", path, err) } @@ -163,7 +164,7 @@ func (s *scanJob) detectFolderMove(ctx context.Context, file scanFile) (*Folder, // parent folder is missing, possible candidate // count the total number of files in the existing folder - count, err := s.Repository.Store.CountByFolderID(ctx, parentFolderID) + count, err := s.Repository.FileStore.CountByFolderID(ctx, parentFolderID) if err != nil { return fmt.Errorf("counting files in folder %d: %w", parentFolderID, err) } diff --git a/pkg/file/frame.go b/pkg/file/frame.go deleted file mode 100644 index de9f7466233..00000000000 --- a/pkg/file/frame.go +++ /dev/null @@ -1,20 +0,0 @@ -package file - -// VisualFile is an interface for files that have a width and height. -type VisualFile interface { - File - GetWidth() int - GetHeight() int - GetFormat() string -} - -func GetMinResolution(f VisualFile) int { - w := f.GetWidth() - h := f.GetHeight() - - if w < h { - return w - } - - return h -} diff --git a/pkg/file/fs.go b/pkg/file/fs.go index 09c7c7c8e19..80148cfa126 100644 --- a/pkg/file/fs.go +++ b/pkg/file/fs.go @@ -6,6 +6,7 @@ import ( "os" "github.com/stashapp/stash/pkg/fsutil" + "github.com/stashapp/stash/pkg/models" ) // Opener provides an interface to open a file. 
@@ -14,7 +15,7 @@ type Opener interface { } type fsOpener struct { - fs FS + fs models.FS name string } @@ -22,15 +23,6 @@ func (o *fsOpener) Open() (io.ReadCloser, error) { return o.fs.Open(o.name) } -// FS represents a file system. -type FS interface { - Stat(name string) (fs.FileInfo, error) - Lstat(name string) (fs.FileInfo, error) - Open(name string) (fs.ReadDirFile, error) - OpenZip(name string) (*ZipFS, error) - IsPathCaseSensitive(path string) (bool, error) -} - // OsFS is a file system backed by the OS. type OsFS struct{} @@ -66,7 +58,7 @@ func (f *OsFS) Open(name string) (fs.ReadDirFile, error) { return os.Open(name) } -func (f *OsFS) OpenZip(name string) (*ZipFS, error) { +func (f *OsFS) OpenZip(name string) (models.ZipFS, error) { info, err := f.Lstat(name) if err != nil { return nil, err diff --git a/pkg/file/handler.go b/pkg/file/handler.go index 5932968b65e..10616eefa50 100644 --- a/pkg/file/handler.go +++ b/pkg/file/handler.go @@ -3,6 +3,8 @@ package file import ( "context" "io/fs" + + "github.com/stashapp/stash/pkg/models" ) // PathFilter provides a filter function for paths. @@ -18,18 +20,18 @@ func (pff PathFilterFunc) Accept(path string) bool { // Filter provides a filter function for Files. type Filter interface { - Accept(ctx context.Context, f File) bool + Accept(ctx context.Context, f models.File) bool } -type FilterFunc func(ctx context.Context, f File) bool +type FilterFunc func(ctx context.Context, f models.File) bool -func (ff FilterFunc) Accept(ctx context.Context, f File) bool { +func (ff FilterFunc) Accept(ctx context.Context, f models.File) bool { return ff(ctx, f) } // Handler provides a handler for Files. type Handler interface { - Handle(ctx context.Context, f File, oldFile File) error + Handle(ctx context.Context, f models.File, oldFile models.File) error } // FilteredHandler is a Handler runs only if the filter accepts the file. 
@@ -39,7 +41,7 @@ type FilteredHandler struct { } // Handle runs the handler if the filter accepts the file. -func (h *FilteredHandler) Handle(ctx context.Context, f File, oldFile File) error { +func (h *FilteredHandler) Handle(ctx context.Context, f models.File, oldFile models.File) error { if h.Accept(ctx, f) { return h.Handler.Handle(ctx, f, oldFile) } @@ -48,6 +50,6 @@ func (h *FilteredHandler) Handle(ctx context.Context, f File, oldFile File) erro // CleanHandler provides a handler for cleaning Files and Folders. type CleanHandler interface { - HandleFile(ctx context.Context, fileDeleter *Deleter, fileID ID) error - HandleFolder(ctx context.Context, fileDeleter *Deleter, folderID FolderID) error + HandleFile(ctx context.Context, fileDeleter *Deleter, fileID models.FileID) error + HandleFolder(ctx context.Context, fileDeleter *Deleter, folderID models.FolderID) error } diff --git a/pkg/file/image/scan.go b/pkg/file/image/scan.go index 5203adba9e2..ba22bbee988 100644 --- a/pkg/file/image/scan.go +++ b/pkg/file/image/scan.go @@ -13,6 +13,7 @@ import ( "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/file/video" "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" _ "golang.org/x/image/webp" ) @@ -21,10 +22,10 @@ type Decorator struct { FFProbe ffmpeg.FFProbe } -func (d *Decorator) Decorate(ctx context.Context, fs file.FS, f file.File) (file.File, error) { +func (d *Decorator) Decorate(ctx context.Context, fs models.FS, f models.File) (models.File, error) { base := f.Base() - decorateFallback := func() (file.File, error) { + decorateFallback := func() (models.File, error) { r, err := fs.Open(base.Path) if err != nil { return f, fmt.Errorf("reading image file %q: %w", base.Path, err) @@ -35,7 +36,7 @@ func (d *Decorator) Decorate(ctx context.Context, fs file.FS, f file.File) (file if err != nil { return f, fmt.Errorf("decoding image file %q: %w", base.Path, err) } - return &file.ImageFile{ + return &models.ImageFile{ 
BaseFile: base, Format: format, Width: c.Width, @@ -58,7 +59,7 @@ func (d *Decorator) Decorate(ctx context.Context, fs file.FS, f file.File) (file // Fallback to catch non-animated avif images that FFProbe detects as video files if probe.Bitrate == 0 && probe.VideoCodec == "av1" { - return &file.ImageFile{ + return &models.ImageFile{ BaseFile: base, Format: "avif", Width: probe.Width, @@ -78,7 +79,7 @@ func (d *Decorator) Decorate(ctx context.Context, fs file.FS, f file.File) (file return videoFileDecorator.Decorate(ctx, fs, f) } - return &file.ImageFile{ + return &models.ImageFile{ BaseFile: base, Format: probe.VideoCodec, Width: probe.Width, @@ -86,14 +87,14 @@ func (d *Decorator) Decorate(ctx context.Context, fs file.FS, f file.File) (file }, nil } -func (d *Decorator) IsMissingMetadata(ctx context.Context, fs file.FS, f file.File) bool { +func (d *Decorator) IsMissingMetadata(ctx context.Context, fs models.FS, f models.File) bool { const ( unsetString = "unset" unsetNumber = -1 ) - imf, isImage := f.(*file.ImageFile) - vf, isVideo := f.(*file.VideoFile) + imf, isImage := f.(*models.ImageFile) + vf, isVideo := f.(*models.VideoFile) switch { case isImage: diff --git a/pkg/file/image_file.go b/pkg/file/image_file.go deleted file mode 100644 index 0de2d9b9871..00000000000 --- a/pkg/file/image_file.go +++ /dev/null @@ -1,21 +0,0 @@ -package file - -// ImageFile is an extension of BaseFile to represent image files. 
-type ImageFile struct { - *BaseFile - Format string `json:"format"` - Width int `json:"width"` - Height int `json:"height"` -} - -func (f ImageFile) GetWidth() int { - return f.Width -} - -func (f ImageFile) GetHeight() int { - return f.Height -} - -func (f ImageFile) GetFormat() string { - return f.Format -} diff --git a/internal/manager/import_file.go b/pkg/file/import.go similarity index 68% rename from internal/manager/import_file.go rename to pkg/file/import.go index bad9d5bce0f..0af94a4d211 100644 --- a/internal/manager/import_file.go +++ b/pkg/file/import.go @@ -1,4 +1,4 @@ -package manager +package file import ( "context" @@ -7,24 +7,22 @@ import ( "path/filepath" "time" - "github.com/stashapp/stash/pkg/file" + "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/jsonschema" ) -// HACK: this is all here because of an import loop in jsonschema -> models -> file +var ErrZipFileNotExist = errors.New("zip file does not exist") -var errZipFileNotExist = errors.New("zip file does not exist") - -type fileFolderImporter struct { - ReaderWriter file.Store - FolderStore file.FolderStore +type Importer struct { + ReaderWriter models.FileFinderCreator + FolderStore models.FolderFinderCreator Input jsonschema.DirEntry - file file.File - folder *file.Folder + file models.File + folder *models.Folder } -func (i *fileFolderImporter) PreImport(ctx context.Context) error { +func (i *Importer) PreImport(ctx context.Context) error { var err error switch ff := i.Input.(type) { @@ -37,9 +35,9 @@ func (i *fileFolderImporter) PreImport(ctx context.Context) error { return err } -func (i *fileFolderImporter) folderJSONToFolder(ctx context.Context, baseJSON *jsonschema.BaseDirEntry) (*file.Folder, error) { - ret := file.Folder{ - DirEntry: file.DirEntry{ +func (i *Importer) folderJSONToFolder(ctx context.Context, baseJSON *jsonschema.BaseDirEntry) (*models.Folder, error) { + ret := models.Folder{ + DirEntry: models.DirEntry{ ModTime: 
baseJSON.ModTime.GetTime(), }, Path: baseJSON.Path, @@ -56,14 +54,14 @@ func (i *fileFolderImporter) folderJSONToFolder(ctx context.Context, baseJSON *j return &ret, nil } -func (i *fileFolderImporter) fileJSONToFile(ctx context.Context, fileJSON jsonschema.DirEntry) (file.File, error) { +func (i *Importer) fileJSONToFile(ctx context.Context, fileJSON jsonschema.DirEntry) (models.File, error) { switch ff := fileJSON.(type) { case *jsonschema.VideoFile: baseFile, err := i.baseFileJSONToBaseFile(ctx, ff.BaseFile) if err != nil { return nil, err } - return &file.VideoFile{ + return &models.VideoFile{ BaseFile: baseFile, Format: ff.Format, Width: ff.Width, @@ -81,7 +79,7 @@ func (i *fileFolderImporter) fileJSONToFile(ctx context.Context, fileJSON jsonsc if err != nil { return nil, err } - return &file.ImageFile{ + return &models.ImageFile{ BaseFile: baseFile, Format: ff.Format, Width: ff.Width, @@ -94,9 +92,9 @@ func (i *fileFolderImporter) fileJSONToFile(ctx context.Context, fileJSON jsonsc return nil, fmt.Errorf("unknown file type") } -func (i *fileFolderImporter) baseFileJSONToBaseFile(ctx context.Context, baseJSON *jsonschema.BaseFile) (*file.BaseFile, error) { - baseFile := file.BaseFile{ - DirEntry: file.DirEntry{ +func (i *Importer) baseFileJSONToBaseFile(ctx context.Context, baseJSON *jsonschema.BaseFile) (*models.BaseFile, error) { + baseFile := models.BaseFile{ + DirEntry: models.DirEntry{ ModTime: baseJSON.ModTime.GetTime(), }, Basename: filepath.Base(baseJSON.Path), @@ -106,7 +104,7 @@ func (i *fileFolderImporter) baseFileJSONToBaseFile(ctx context.Context, baseJSO } for _, fp := range baseJSON.Fingerprints { - baseFile.Fingerprints = append(baseFile.Fingerprints, file.Fingerprint{ + baseFile.Fingerprints = append(baseFile.Fingerprints, models.Fingerprint{ Type: fp.Type, Fingerprint: fp.Fingerprint, }) @@ -119,7 +117,7 @@ func (i *fileFolderImporter) baseFileJSONToBaseFile(ctx context.Context, baseJSO return &baseFile, nil } -func (i *fileFolderImporter) 
populateZipFileID(ctx context.Context, f *file.DirEntry) error { +func (i *Importer) populateZipFileID(ctx context.Context, f *models.DirEntry) error { zipFilePath := i.Input.DirEntry().ZipFile if zipFilePath != "" { zf, err := i.ReaderWriter.FindByPath(ctx, zipFilePath) @@ -128,7 +126,7 @@ func (i *fileFolderImporter) populateZipFileID(ctx context.Context, f *file.DirE } if zf == nil { - return errZipFileNotExist + return ErrZipFileNotExist } id := zf.Base().ID @@ -138,15 +136,15 @@ func (i *fileFolderImporter) populateZipFileID(ctx context.Context, f *file.DirE return nil } -func (i *fileFolderImporter) PostImport(ctx context.Context, id int) error { +func (i *Importer) PostImport(ctx context.Context, id int) error { return nil } -func (i *fileFolderImporter) Name() string { +func (i *Importer) Name() string { return i.Input.DirEntry().Path } -func (i *fileFolderImporter) FindExistingID(ctx context.Context) (*int, error) { +func (i *Importer) FindExistingID(ctx context.Context) (*int, error) { path := i.Input.DirEntry().Path existing, err := i.ReaderWriter.FindByPath(ctx, path) if err != nil { @@ -161,7 +159,7 @@ func (i *fileFolderImporter) FindExistingID(ctx context.Context) (*int, error) { return nil, nil } -func (i *fileFolderImporter) createFolderHierarchy(ctx context.Context, p string) (*file.Folder, error) { +func (i *Importer) createFolderHierarchy(ctx context.Context, p string) (*models.Folder, error) { parentPath := filepath.Dir(p) if parentPath == p { @@ -177,7 +175,7 @@ func (i *fileFolderImporter) createFolderHierarchy(ctx context.Context, p string return i.getOrCreateFolder(ctx, p, parent) } -func (i *fileFolderImporter) getOrCreateFolder(ctx context.Context, path string, parent *file.Folder) (*file.Folder, error) { +func (i *Importer) getOrCreateFolder(ctx context.Context, path string, parent *models.Folder) (*models.Folder, error) { folder, err := i.FolderStore.FindByPath(ctx, path) if err != nil { return nil, err @@ -189,7 +187,7 @@ func (i 
*fileFolderImporter) getOrCreateFolder(ctx context.Context, path string, now := time.Now() - folder = &file.Folder{ + folder = &models.Folder{ Path: path, CreatedAt: now, UpdatedAt: now, @@ -207,7 +205,7 @@ func (i *fileFolderImporter) getOrCreateFolder(ctx context.Context, path string, return folder, nil } -func (i *fileFolderImporter) Create(ctx context.Context) (*int, error) { +func (i *Importer) Create(ctx context.Context) (*int, error) { // create folder hierarchy and set parent folder id path := i.Input.DirEntry().Path path = filepath.Dir(path) @@ -223,7 +221,7 @@ func (i *fileFolderImporter) Create(ctx context.Context) (*int, error) { return i.createFile(ctx, folder) } -func (i *fileFolderImporter) createFile(ctx context.Context, parentFolder *file.Folder) (*int, error) { +func (i *Importer) createFile(ctx context.Context, parentFolder *models.Folder) (*int, error) { if parentFolder != nil { i.file.Base().ParentFolderID = parentFolder.ID } @@ -236,7 +234,7 @@ func (i *fileFolderImporter) createFile(ctx context.Context, parentFolder *file. 
return &id, nil } -func (i *fileFolderImporter) createFolder(ctx context.Context, parentFolder *file.Folder) (*int, error) { +func (i *Importer) createFolder(ctx context.Context, parentFolder *models.Folder) (*int, error) { if parentFolder != nil { i.folder.ParentFolderID = &parentFolder.ID } @@ -249,7 +247,7 @@ func (i *fileFolderImporter) createFolder(ctx context.Context, parentFolder *fil return &id, nil } -func (i *fileFolderImporter) Update(ctx context.Context, id int) error { +func (i *Importer) Update(ctx context.Context, id int) error { // update not supported return nil } diff --git a/pkg/file/move.go b/pkg/file/move.go index 3b3c66ec50d..64a83fed645 100644 --- a/pkg/file/move.go +++ b/pkg/file/move.go @@ -11,6 +11,7 @@ import ( "time" "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/txn" ) @@ -40,14 +41,14 @@ func (r folderCreatorStatRenamerImpl) Mkdir(name string, perm os.FileMode) error type Mover struct { Renamer DirMakerStatRenamer - Files GetterUpdater - Folders FolderStore + Files models.FileFinderUpdater + Folders models.FolderReaderWriter moved map[string]string foldersCreated []string } -func NewMover(fileStore GetterUpdater, folderStore FolderStore) *Mover { +func NewMover(fileStore models.FileFinderUpdater, folderStore models.FolderReaderWriter) *Mover { return &Mover{ Files: fileStore, Folders: folderStore, @@ -60,7 +61,7 @@ func NewMover(fileStore GetterUpdater, folderStore FolderStore) *Mover { // Move moves the file to the given folder and basename. If basename is empty, then the existing basename is used. // Assumes that the parent folder exists in the filesystem. 
-func (m *Mover) Move(ctx context.Context, f File, folder *Folder, basename string) error { +func (m *Mover) Move(ctx context.Context, f models.File, folder *models.Folder, basename string) error { fBase := f.Base() // don't allow moving files in zip files diff --git a/pkg/file/scan.go b/pkg/file/scan.go index badb5ab23e5..a0d301e60c2 100644 --- a/pkg/file/scan.go +++ b/pkg/file/scan.go @@ -13,6 +13,7 @@ import ( "github.com/remeh/sizedwaitgroup" "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/txn" "github.com/stashapp/stash/pkg/utils" ) @@ -24,15 +25,6 @@ const ( maxRetries = -1 ) -// Repository provides access to storage methods for files and folders. -type Repository struct { - txn.Manager - txn.DatabaseProvider - Store - - FolderStore FolderStore -} - // Scanner scans files into the database. // // The scan process works using two goroutines. The first walks through the provided paths @@ -59,7 +51,7 @@ type Repository struct { // If the file is not a renamed file, then the decorators are fired and the file is created, then // the applicable handlers are fired. type Scanner struct { - FS FS + FS models.FS Repository Repository FingerprintCalculator FingerprintCalculator @@ -67,6 +59,38 @@ type Scanner struct { FileDecorators []Decorator } +// FingerprintCalculator calculates a fingerprint for the provided file. +type FingerprintCalculator interface { + CalculateFingerprints(f *models.BaseFile, o Opener, useExisting bool) ([]models.Fingerprint, error) +} + +// Decorator wraps the Decorate method to add additional functionality while scanning files. +type Decorator interface { + Decorate(ctx context.Context, fs models.FS, f models.File) (models.File, error) + IsMissingMetadata(ctx context.Context, fs models.FS, f models.File) bool +} + +type FilteredDecorator struct { + Decorator + Filter +} + +// Decorate runs the decorator if the filter accepts the file. 
+func (d *FilteredDecorator) Decorate(ctx context.Context, fs models.FS, f models.File) (models.File, error) { + if d.Accept(ctx, f) { + return d.Decorator.Decorate(ctx, fs, f) + } + return f, nil +} + +func (d *FilteredDecorator) IsMissingMetadata(ctx context.Context, fs models.FS, f models.File) bool { + if d.Accept(ctx, f) { + return d.Decorator.IsMissingMetadata(ctx, fs, f) + } + + return false +} + // ProgressReporter is used to report progress of the scan. type ProgressReporter interface { AddTotal(total int) @@ -129,8 +153,8 @@ func (s *Scanner) Scan(ctx context.Context, handlers []Handler, options ScanOpti } type scanFile struct { - *BaseFile - fs FS + *models.BaseFile + fs models.FS info fs.FileInfo } @@ -198,7 +222,7 @@ func (s *scanJob) queueFiles(ctx context.Context, paths []string) error { return err } -func (s *scanJob) queueFileFunc(ctx context.Context, f FS, zipFile *scanFile) fs.WalkDirFunc { +func (s *scanJob) queueFileFunc(ctx context.Context, f models.FS, zipFile *scanFile) fs.WalkDirFunc { return func(path string, d fs.DirEntry, err error) error { if err != nil { // don't let errors prevent scanning @@ -229,8 +253,8 @@ func (s *scanJob) queueFileFunc(ctx context.Context, f FS, zipFile *scanFile) fs } ff := scanFile{ - BaseFile: &BaseFile{ - DirEntry: DirEntry{ + BaseFile: &models.BaseFile{ + DirEntry: models.DirEntry{ ModTime: modTime(info), }, Path: path, @@ -286,7 +310,7 @@ func (s *scanJob) queueFileFunc(ctx context.Context, f FS, zipFile *scanFile) fs } } -func getFileSize(f FS, path string, info fs.FileInfo) (int64, error) { +func getFileSize(f models.FS, path string, info fs.FileInfo) (int64, error) { // #2196/#3042 - replace size with target size if file is a symlink if info.Mode()&os.ModeSymlink == os.ModeSymlink { targetInfo, err := f.Stat(path) @@ -408,10 +432,10 @@ func (s *scanJob) processQueueItem(ctx context.Context, f scanFile) { }) } -func (s *scanJob) getFolderID(ctx context.Context, path string) (*FolderID, error) { +func (s 
*scanJob) getFolderID(ctx context.Context, path string) (*models.FolderID, error) { // check the folder cache first if f, ok := s.folderPathToID.Load(path); ok { - v := f.(FolderID) + v := f.(models.FolderID) return &v, nil } @@ -428,7 +452,7 @@ func (s *scanJob) getFolderID(ctx context.Context, path string) (*FolderID, erro return &ret.ID, nil } -func (s *scanJob) getZipFileID(ctx context.Context, zipFile *scanFile) (*ID, error) { +func (s *scanJob) getZipFileID(ctx context.Context, zipFile *scanFile) (*models.FileID, error) { if zipFile == nil { return nil, nil } @@ -441,11 +465,11 @@ func (s *scanJob) getZipFileID(ctx context.Context, zipFile *scanFile) (*ID, err // check the folder cache first if f, ok := s.zipPathToID.Load(path); ok { - v := f.(ID) + v := f.(models.FileID) return &v, nil } - ret, err := s.Repository.FindByPath(ctx, path) + ret, err := s.Repository.FileStore.FindByPath(ctx, path) if err != nil { return nil, fmt.Errorf("getting zip file ID for %q: %w", path, err) } @@ -489,7 +513,7 @@ func (s *scanJob) handleFolder(ctx context.Context, file scanFile) error { }) } -func (s *scanJob) onNewFolder(ctx context.Context, file scanFile) (*Folder, error) { +func (s *scanJob) onNewFolder(ctx context.Context, file scanFile) (*models.Folder, error) { renamed, err := s.handleFolderRename(ctx, file) if err != nil { return nil, err @@ -501,7 +525,7 @@ func (s *scanJob) onNewFolder(ctx context.Context, file scanFile) (*Folder, erro now := time.Now() - toCreate := &Folder{ + toCreate := &models.Folder{ DirEntry: file.DirEntry, Path: file.Path, CreatedAt: now, @@ -536,7 +560,7 @@ func (s *scanJob) onNewFolder(ctx context.Context, file scanFile) (*Folder, erro return toCreate, nil } -func (s *scanJob) handleFolderRename(ctx context.Context, file scanFile) (*Folder, error) { +func (s *scanJob) handleFolderRename(ctx context.Context, file scanFile) (*models.Folder, error) { // ignore folders in zip files if file.ZipFileID != nil { return nil, nil @@ -572,7 +596,7 @@ 
func (s *scanJob) handleFolderRename(ctx context.Context, file scanFile) (*Folde return renamedFrom, nil } -func (s *scanJob) onExistingFolder(ctx context.Context, f scanFile, existing *Folder) (*Folder, error) { +func (s *scanJob) onExistingFolder(ctx context.Context, f scanFile, existing *models.Folder) (*models.Folder, error) { update := false // update if mod time is changed @@ -613,12 +637,12 @@ func modTime(info fs.FileInfo) time.Time { func (s *scanJob) handleFile(ctx context.Context, f scanFile) error { defer s.incrementProgress(f) - var ff File + var ff models.File // don't use a transaction to check if new or existing if err := s.withDB(ctx, func(ctx context.Context) error { // determine if file already exists in data store var err error - ff, err = s.Repository.FindByPath(ctx, f.Path) + ff, err = s.Repository.FileStore.FindByPath(ctx, f.Path) if err != nil { return fmt.Errorf("checking for existing file %q: %w", f.Path, err) } @@ -661,7 +685,7 @@ func (s *scanJob) isZipFile(path string) bool { return false } -func (s *scanJob) onNewFile(ctx context.Context, f scanFile) (File, error) { +func (s *scanJob) onNewFile(ctx context.Context, f scanFile) (models.File, error) { now := time.Now() baseFile := f.BaseFile @@ -716,7 +740,7 @@ func (s *scanJob) onNewFile(ctx context.Context, f scanFile) (File, error) { // if not renamed, queue file for creation if err := s.withTxn(ctx, func(ctx context.Context) error { - if err := s.Repository.Create(ctx, file); err != nil { + if err := s.Repository.FileStore.Create(ctx, file); err != nil { return fmt.Errorf("creating file %q: %w", path, err) } @@ -732,7 +756,7 @@ func (s *scanJob) onNewFile(ctx context.Context, f scanFile) (File, error) { return file, nil } -func (s *scanJob) fireDecorators(ctx context.Context, fs FS, f File) (File, error) { +func (s *scanJob) fireDecorators(ctx context.Context, fs models.FS, f models.File) (models.File, error) { for _, h := range s.FileDecorators { var err error f, err = 
h.Decorate(ctx, fs, f) @@ -744,7 +768,7 @@ func (s *scanJob) fireDecorators(ctx context.Context, fs FS, f File) (File, erro return f, nil } -func (s *scanJob) fireHandlers(ctx context.Context, f File, oldFile File) error { +func (s *scanJob) fireHandlers(ctx context.Context, f models.File, oldFile models.File) error { for _, h := range s.handlers { if err := h.Handle(ctx, f, oldFile); err != nil { return err @@ -754,7 +778,7 @@ func (s *scanJob) fireHandlers(ctx context.Context, f File, oldFile File) error return nil } -func (s *scanJob) calculateFingerprints(fs FS, f *BaseFile, path string, useExisting bool) (Fingerprints, error) { +func (s *scanJob) calculateFingerprints(fs models.FS, f *models.BaseFile, path string, useExisting bool) (models.Fingerprints, error) { // only log if we're (re)calculating fingerprints if !useExisting { logger.Infof("Calculating fingerprints for %s ...", path) @@ -772,7 +796,7 @@ func (s *scanJob) calculateFingerprints(fs FS, f *BaseFile, path string, useExis return fp, nil } -func appendFileUnique(v []File, toAdd []File) []File { +func appendFileUnique(v []models.File, toAdd []models.File) []models.File { for _, f := range toAdd { found := false id := f.Base().ID @@ -791,7 +815,7 @@ func appendFileUnique(v []File, toAdd []File) []File { return v } -func (s *scanJob) getFileFS(f *BaseFile) (FS, error) { +func (s *scanJob) getFileFS(f *models.BaseFile) (models.FS, error) { if f.ZipFile == nil { return s.FS, nil } @@ -805,11 +829,11 @@ func (s *scanJob) getFileFS(f *BaseFile) (FS, error) { return fs.OpenZip(zipPath) } -func (s *scanJob) handleRename(ctx context.Context, f File, fp []Fingerprint) (File, error) { - var others []File +func (s *scanJob) handleRename(ctx context.Context, f models.File, fp []models.Fingerprint) (models.File, error) { + var others []models.File for _, tfp := range fp { - thisOthers, err := s.Repository.FindByFingerprint(ctx, tfp) + thisOthers, err := s.Repository.FileStore.FindByFingerprint(ctx, tfp) if err != 
nil { return nil, fmt.Errorf("getting files by fingerprint %v: %w", tfp, err) } @@ -817,7 +841,7 @@ func (s *scanJob) handleRename(ctx context.Context, f File, fp []Fingerprint) (F others = appendFileUnique(others, thisOthers) } - var missing []File + var missing []models.File fZipID := f.Base().ZipFileID for _, other := range others { @@ -867,7 +891,7 @@ func (s *scanJob) handleRename(ctx context.Context, f File, fp []Fingerprint) (F fBase.Fingerprints = otherBase.Fingerprints if err := s.withTxn(ctx, func(ctx context.Context) error { - if err := s.Repository.Update(ctx, f); err != nil { + if err := s.Repository.FileStore.Update(ctx, f); err != nil { return fmt.Errorf("updating file for rename %q: %w", fBase.Path, err) } @@ -889,7 +913,7 @@ func (s *scanJob) handleRename(ctx context.Context, f File, fp []Fingerprint) (F return f, nil } -func (s *scanJob) isHandlerRequired(ctx context.Context, f File) bool { +func (s *scanJob) isHandlerRequired(ctx context.Context, f models.File) bool { accept := len(s.options.HandlerRequiredFilters) == 0 for _, filter := range s.options.HandlerRequiredFilters { // accept if any filter accepts the file @@ -910,7 +934,7 @@ func (s *scanJob) isHandlerRequired(ctx context.Context, f File) bool { // - file size // - image format, width or height // - video codec, audio codec, format, width, height, framerate or bitrate -func (s *scanJob) isMissingMetadata(ctx context.Context, f scanFile, existing File) bool { +func (s *scanJob) isMissingMetadata(ctx context.Context, f scanFile, existing models.File) bool { for _, h := range s.FileDecorators { if h.IsMissingMetadata(ctx, f.fs, existing) { return true @@ -920,7 +944,7 @@ func (s *scanJob) isMissingMetadata(ctx context.Context, f scanFile, existing Fi return false } -func (s *scanJob) setMissingMetadata(ctx context.Context, f scanFile, existing File) (File, error) { +func (s *scanJob) setMissingMetadata(ctx context.Context, f scanFile, existing models.File) (models.File, error) { path := 
existing.Base().Path logger.Infof("Updating metadata for %s", path) @@ -934,7 +958,7 @@ func (s *scanJob) setMissingMetadata(ctx context.Context, f scanFile, existing F // queue file for update if err := s.withTxn(ctx, func(ctx context.Context) error { - if err := s.Repository.Update(ctx, existing); err != nil { + if err := s.Repository.FileStore.Update(ctx, existing); err != nil { return fmt.Errorf("updating file %q: %w", path, err) } @@ -946,7 +970,7 @@ func (s *scanJob) setMissingMetadata(ctx context.Context, f scanFile, existing F return existing, nil } -func (s *scanJob) setMissingFingerprints(ctx context.Context, f scanFile, existing File) (File, error) { +func (s *scanJob) setMissingFingerprints(ctx context.Context, f scanFile, existing models.File) (models.File, error) { const useExisting = true fp, err := s.calculateFingerprints(f.fs, existing.Base(), f.Path, useExisting) if err != nil { @@ -957,7 +981,7 @@ func (s *scanJob) setMissingFingerprints(ctx context.Context, f scanFile, existi existing.SetFingerprints(fp) if err := s.withTxn(ctx, func(ctx context.Context) error { - if err := s.Repository.Update(ctx, existing); err != nil { + if err := s.Repository.FileStore.Update(ctx, existing); err != nil { return fmt.Errorf("updating file %q: %w", f.Path, err) } @@ -971,7 +995,7 @@ func (s *scanJob) setMissingFingerprints(ctx context.Context, f scanFile, existi } // returns a file only if it was updated -func (s *scanJob) onExistingFile(ctx context.Context, f scanFile, existing File) (File, error) { +func (s *scanJob) onExistingFile(ctx context.Context, f scanFile, existing models.File) (models.File, error) { base := existing.Base() path := base.Path @@ -1006,7 +1030,7 @@ func (s *scanJob) onExistingFile(ctx context.Context, f scanFile, existing File) // queue file for update if err := s.withTxn(ctx, func(ctx context.Context) error { - if err := s.Repository.Update(ctx, existing); err != nil { + if err := s.Repository.FileStore.Update(ctx, existing); err != 
nil { return fmt.Errorf("updating file %q: %w", path, err) } @@ -1022,21 +1046,21 @@ func (s *scanJob) onExistingFile(ctx context.Context, f scanFile, existing File) return existing, nil } -func (s *scanJob) removeOutdatedFingerprints(existing File, fp Fingerprints) { +func (s *scanJob) removeOutdatedFingerprints(existing models.File, fp models.Fingerprints) { // HACK - if no MD5 fingerprint was returned, and the oshash is changed // then remove the MD5 fingerprint - oshash := fp.For(FingerprintTypeOshash) + oshash := fp.For(models.FingerprintTypeOshash) if oshash == nil { return } - existingOshash := existing.Base().Fingerprints.For(FingerprintTypeOshash) + existingOshash := existing.Base().Fingerprints.For(models.FingerprintTypeOshash) if existingOshash == nil || *existingOshash == *oshash { // missing oshash or same oshash - nothing to do return } - md5 := fp.For(FingerprintTypeMD5) + md5 := fp.For(models.FingerprintTypeMD5) if md5 != nil { // nothing to do @@ -1045,11 +1069,11 @@ func (s *scanJob) removeOutdatedFingerprints(existing File, fp Fingerprints) { // oshash has changed, MD5 is missing - remove MD5 from the existing fingerprints logger.Infof("Removing outdated checksum from %s", existing.Base().Path) - existing.Base().Fingerprints.Remove(FingerprintTypeMD5) + existing.Base().Fingerprints.Remove(models.FingerprintTypeMD5) } // returns a file only if it was updated -func (s *scanJob) onUnchangedFile(ctx context.Context, f scanFile, existing File) (File, error) { +func (s *scanJob) onUnchangedFile(ctx context.Context, f scanFile, existing models.File) (models.File, error) { var err error isMissingMetdata := s.isMissingMetadata(ctx, f, existing) diff --git a/pkg/file/video/caption.go b/pkg/file/video/caption.go index d2f8e79a5b5..bec3db6fd64 100644 --- a/pkg/file/video/caption.go +++ b/pkg/file/video/caption.go @@ -9,7 +9,6 @@ import ( "strings" "github.com/asticode/go-astisub" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/logger" 
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/txn" @@ -87,12 +86,12 @@ func getCaptionsLangFromPath(captionPath string) string { } type CaptionUpdater interface { - GetCaptions(ctx context.Context, fileID file.ID) ([]*models.VideoCaption, error) - UpdateCaptions(ctx context.Context, fileID file.ID, captions []*models.VideoCaption) error + GetCaptions(ctx context.Context, fileID models.FileID) ([]*models.VideoCaption, error) + UpdateCaptions(ctx context.Context, fileID models.FileID, captions []*models.VideoCaption) error } // associates captions to scene/s with the same basename -func AssociateCaptions(ctx context.Context, captionPath string, txnMgr txn.Manager, fqb file.Getter, w CaptionUpdater) { +func AssociateCaptions(ctx context.Context, captionPath string, txnMgr txn.Manager, fqb models.FileFinder, w CaptionUpdater) { captionLang := getCaptionsLangFromPath(captionPath) captionPrefix := getCaptionPrefix(captionPath) @@ -108,7 +107,7 @@ func AssociateCaptions(ctx context.Context, captionPath string, txnMgr txn.Manag // found some files // filter out non video files switch f.(type) { - case *file.VideoFile: + case *models.VideoFile: break default: continue @@ -143,7 +142,7 @@ func AssociateCaptions(ctx context.Context, captionPath string, txnMgr txn.Manag } // CleanCaptions removes non existent/accessible language codes from captions -func CleanCaptions(ctx context.Context, f *file.VideoFile, txnMgr txn.Manager, w CaptionUpdater) error { +func CleanCaptions(ctx context.Context, f *models.VideoFile, txnMgr txn.Manager, w CaptionUpdater) error { captions, err := w.GetCaptions(ctx, f.ID) if err != nil { return fmt.Errorf("getting captions for file %s: %w", f.Path, err) diff --git a/pkg/file/video/scan.go b/pkg/file/video/scan.go index 1f3d7817f35..ca7d0be963a 100644 --- a/pkg/file/video/scan.go +++ b/pkg/file/video/scan.go @@ -7,6 +7,7 @@ import ( "github.com/stashapp/stash/pkg/ffmpeg" "github.com/stashapp/stash/pkg/file" + 
"github.com/stashapp/stash/pkg/models" ) // Decorator adds video specific fields to a File. @@ -14,7 +15,7 @@ type Decorator struct { FFProbe ffmpeg.FFProbe } -func (d *Decorator) Decorate(ctx context.Context, fs file.FS, f file.File) (file.File, error) { +func (d *Decorator) Decorate(ctx context.Context, fs models.FS, f models.File) (models.File, error) { if d.FFProbe == "" { return f, errors.New("ffprobe not configured") } @@ -42,7 +43,7 @@ func (d *Decorator) Decorate(ctx context.Context, fs file.FS, f file.File) (file interactive = true } - return &file.VideoFile{ + return &models.VideoFile{ BaseFile: base, Format: string(container), VideoCodec: videoFile.VideoCodec, @@ -56,13 +57,13 @@ func (d *Decorator) Decorate(ctx context.Context, fs file.FS, f file.File) (file }, nil } -func (d *Decorator) IsMissingMetadata(ctx context.Context, fs file.FS, f file.File) bool { +func (d *Decorator) IsMissingMetadata(ctx context.Context, fs models.FS, f models.File) bool { const ( unsetString = "unset" unsetNumber = -1 ) - vf, ok := f.(*file.VideoFile) + vf, ok := f.(*models.VideoFile) if !ok { return true } diff --git a/pkg/file/video_file.go b/pkg/file/video_file.go deleted file mode 100644 index 382c81e199c..00000000000 --- a/pkg/file/video_file.go +++ /dev/null @@ -1,29 +0,0 @@ -package file - -// VideoFile is an extension of BaseFile to represent video files. 
-type VideoFile struct { - *BaseFile - Format string `json:"format"` - Width int `json:"width"` - Height int `json:"height"` - Duration float64 `json:"duration"` - VideoCodec string `json:"video_codec"` - AudioCodec string `json:"audio_codec"` - FrameRate float64 `json:"frame_rate"` - BitRate int64 `json:"bitrate"` - - Interactive bool `json:"interactive"` - InteractiveSpeed *int `json:"interactive_speed"` -} - -func (f VideoFile) GetWidth() int { - return f.Width -} - -func (f VideoFile) GetHeight() int { - return f.Height -} - -func (f VideoFile) GetFormat() string { - return f.Format -} diff --git a/pkg/file/walk.go b/pkg/file/walk.go index a73781d4548..3c6a157b758 100644 --- a/pkg/file/walk.go +++ b/pkg/file/walk.go @@ -6,6 +6,8 @@ import ( "os" "path/filepath" "sort" + + "github.com/stashapp/stash/pkg/models" ) // Modified from github.com/facebookgo/symwalk @@ -48,7 +50,7 @@ import ( // // Note that symwalk.Walk does not terminate if there are any non-terminating loops in // the file structure. 
-func walkSym(f FS, filename string, linkDirname string, walkFn fs.WalkDirFunc) error { +func walkSym(f models.FS, filename string, linkDirname string, walkFn fs.WalkDirFunc) error { symWalkFunc := func(path string, info fs.DirEntry, err error) error { if fname, err := filepath.Rel(filename, path); err == nil { @@ -80,7 +82,7 @@ func walkSym(f FS, filename string, linkDirname string, walkFn fs.WalkDirFunc) e } // symWalk extends filepath.Walk to also follow symlinks -func symWalk(fs FS, path string, walkFn fs.WalkDirFunc) error { +func symWalk(fs models.FS, path string, walkFn fs.WalkDirFunc) error { return walkSym(fs, path, path, walkFn) } @@ -93,7 +95,7 @@ func (d *statDirEntry) IsDir() bool { return d.info.IsDir() } func (d *statDirEntry) Type() fs.FileMode { return d.info.Mode().Type() } func (d *statDirEntry) Info() (fs.FileInfo, error) { return d.info, nil } -func fsWalk(f FS, root string, fn fs.WalkDirFunc) error { +func fsWalk(f models.FS, root string, fn fs.WalkDirFunc) error { info, err := f.Lstat(root) if err != nil { err = fn(root, nil, err) @@ -106,7 +108,7 @@ func fsWalk(f FS, root string, fn fs.WalkDirFunc) error { return err } -func walkDir(f FS, path string, d fs.DirEntry, walkDirFn fs.WalkDirFunc) error { +func walkDir(f models.FS, path string, d fs.DirEntry, walkDirFn fs.WalkDirFunc) error { if err := walkDirFn(path, d, nil); err != nil || !d.IsDir() { if errors.Is(err, fs.SkipDir) && d.IsDir() { // Successfully skipped directory. @@ -143,7 +145,7 @@ func walkDir(f FS, path string, d fs.DirEntry, walkDirFn fs.WalkDirFunc) error { // readDir reads the directory named by dirname and returns // a sorted list of directory entries. 
-func readDir(fs FS, dirname string) ([]fs.DirEntry, error) { +func readDir(fs models.FS, dirname string) ([]fs.DirEntry, error) { f, err := fs.Open(dirname) if err != nil { return nil, err diff --git a/pkg/file/zip.go b/pkg/file/zip.go index 5cef1184ef1..a17b596852f 100644 --- a/pkg/file/zip.go +++ b/pkg/file/zip.go @@ -10,6 +10,7 @@ import ( "path/filepath" "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" "github.com/xWTF/chardet" "golang.org/x/net/html/charset" @@ -22,14 +23,14 @@ var ( ) // ZipFS is a file system backed by a zip file. -type ZipFS struct { +type zipFS struct { *zip.Reader zipFileCloser io.Closer zipInfo fs.FileInfo zipPath string } -func newZipFS(fs FS, path string, info fs.FileInfo) (*ZipFS, error) { +func newZipFS(fs models.FS, path string, info fs.FileInfo) (*zipFS, error) { reader, err := fs.Open(path) if err != nil { return nil, err @@ -85,7 +86,7 @@ func newZipFS(fs FS, path string, info fs.FileInfo) (*ZipFS, error) { } } - return &ZipFS{ + return &zipFS{ Reader: zipReader, zipFileCloser: reader, zipInfo: info, @@ -93,7 +94,7 @@ func newZipFS(fs FS, path string, info fs.FileInfo) (*ZipFS, error) { }, nil } -func (f *ZipFS) rel(name string) (string, error) { +func (f *zipFS) rel(name string) (string, error) { if f.zipPath == name { return ".", nil } @@ -110,7 +111,7 @@ func (f *ZipFS) rel(name string) (string, error) { return relName, nil } -func (f *ZipFS) Stat(name string) (fs.FileInfo, error) { +func (f *zipFS) Stat(name string) (fs.FileInfo, error) { reader, err := f.Open(name) if err != nil { return nil, err @@ -120,15 +121,15 @@ func (f *ZipFS) Stat(name string) (fs.FileInfo, error) { return reader.Stat() } -func (f *ZipFS) Lstat(name string) (fs.FileInfo, error) { +func (f *zipFS) Lstat(name string) (fs.FileInfo, error) { return f.Stat(name) } -func (f *ZipFS) OpenZip(name string) (*ZipFS, error) { +func (f *zipFS) OpenZip(name string) (models.ZipFS, error) { return nil, errZipFSOpenZip } -func (f 
*ZipFS) IsPathCaseSensitive(path string) (bool, error) { +func (f *zipFS) IsPathCaseSensitive(path string) (bool, error) { return true, nil } @@ -145,7 +146,7 @@ func (f *zipReadDirFile) ReadDir(n int) ([]fs.DirEntry, error) { return asReadDirFile.ReadDir(n) } -func (f *ZipFS) Open(name string) (fs.ReadDirFile, error) { +func (f *zipFS) Open(name string) (fs.ReadDirFile, error) { relName, err := f.rel(name) if err != nil { return nil, err @@ -161,12 +162,12 @@ func (f *ZipFS) Open(name string) (fs.ReadDirFile, error) { }, nil } -func (f *ZipFS) Close() error { +func (f *zipFS) Close() error { return f.zipFileCloser.Close() } // openOnly returns a ReadCloser where calling Close will close the zip fs as well. -func (f *ZipFS) OpenOnly(name string) (io.ReadCloser, error) { +func (f *zipFS) OpenOnly(name string) (io.ReadCloser, error) { r, err := f.Open(name) if err != nil { return nil, err diff --git a/pkg/fsutil/file.go b/pkg/fsutil/file.go index 1bf98266675..0c0eb52715f 100644 --- a/pkg/fsutil/file.go +++ b/pkg/fsutil/file.go @@ -20,7 +20,7 @@ func CopyFile(srcpath, dstpath string) (err error) { return err } - w, err := os.OpenFile(dstpath, os.O_CREATE|os.O_EXCL, 0666) + w, err := os.OpenFile(dstpath, os.O_CREATE|os.O_WRONLY|os.O_EXCL, 0666) if err != nil { r.Close() // We need to close the input file as the defer below would not be called. 
return err @@ -59,9 +59,9 @@ func SafeMove(src, dst string) error { err := os.Rename(src, dst) if err != nil { - err = CopyFile(src, dst) - if err != nil { - return err + copyErr := CopyFile(src, dst) + if copyErr != nil { + return fmt.Errorf("copying file during SafeMove failed with: '%w'; renaming file failed previously with: '%v'", copyErr, err) } err = os.Remove(src) diff --git a/pkg/gallery/chapter_import.go b/pkg/gallery/chapter_import.go index 91abe909de0..ee223b1aa9f 100644 --- a/pkg/gallery/chapter_import.go +++ b/pkg/gallery/chapter_import.go @@ -8,15 +8,14 @@ import ( "github.com/stashapp/stash/pkg/models/jsonschema" ) -type ChapterCreatorUpdater interface { - Create(ctx context.Context, newGalleryChapter *models.GalleryChapter) error - Update(ctx context.Context, updatedGalleryChapter *models.GalleryChapter) error +type ChapterImporterReaderWriter interface { + models.GalleryChapterCreatorUpdater FindByGalleryID(ctx context.Context, galleryID int) ([]*models.GalleryChapter, error) } type ChapterImporter struct { GalleryID int - ReaderWriter ChapterCreatorUpdater + ReaderWriter ChapterImporterReaderWriter Input jsonschema.GalleryChapter MissingRefBehaviour models.ImportMissingRefEnum diff --git a/pkg/gallery/delete.go b/pkg/gallery/delete.go index 60aee0d2853..5609b2f4bac 100644 --- a/pkg/gallery/delete.go +++ b/pkg/gallery/delete.go @@ -41,12 +41,7 @@ func (s *Service) Destroy(ctx context.Context, i *models.Gallery, fileDeleter *i return imgsDestroyed, nil } -type ChapterDestroyer interface { - FindByGalleryID(ctx context.Context, galleryID int) ([]*models.GalleryChapter, error) - Destroy(ctx context.Context, id int) error -} - -func DestroyChapter(ctx context.Context, galleryChapter *models.GalleryChapter, qb ChapterDestroyer) error { +func DestroyChapter(ctx context.Context, galleryChapter *models.GalleryChapter, qb models.GalleryChapterDestroyer) error { return qb.Destroy(ctx, galleryChapter.ID) } diff --git a/pkg/gallery/export.go 
b/pkg/gallery/export.go index d53a2a8e585..83f3c31cebc 100644 --- a/pkg/gallery/export.go +++ b/pkg/gallery/export.go @@ -7,13 +7,8 @@ import ( "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/json" "github.com/stashapp/stash/pkg/models/jsonschema" - "github.com/stashapp/stash/pkg/studio" ) -type ChapterFinder interface { - FindByGalleryID(ctx context.Context, galleryID int) ([]*models.GalleryChapter, error) -} - // ToBasicJSON converts a gallery object into its JSON object equivalent. It // does not convert the relationships to other objects. func ToBasicJSON(gallery *models.Gallery) (*jsonschema.Gallery, error) { @@ -48,7 +43,7 @@ func ToBasicJSON(gallery *models.Gallery) (*jsonschema.Gallery, error) { // GetStudioName returns the name of the provided gallery's studio. It returns an // empty string if there is no studio assigned to the gallery. -func GetStudioName(ctx context.Context, reader studio.Finder, gallery *models.Gallery) (string, error) { +func GetStudioName(ctx context.Context, reader models.StudioGetter, gallery *models.Gallery) (string, error) { if gallery.StudioID != nil { studio, err := reader.Find(ctx, *gallery.StudioID) if err != nil { @@ -65,7 +60,7 @@ func GetStudioName(ctx context.Context, reader studio.Finder, gallery *models.Ga // GetGalleryChaptersJSON returns a slice of GalleryChapter JSON representation // objects corresponding to the provided gallery's chapters. 
-func GetGalleryChaptersJSON(ctx context.Context, chapterReader ChapterFinder, gallery *models.Gallery) ([]jsonschema.GalleryChapter, error) { +func GetGalleryChaptersJSON(ctx context.Context, chapterReader models.GalleryChapterFinder, gallery *models.Gallery) ([]jsonschema.GalleryChapter, error) { galleryChapters, err := chapterReader.FindByGalleryID(ctx, gallery.ID) if err != nil { return nil, fmt.Errorf("error getting gallery chapters: %v", err) diff --git a/pkg/gallery/export_test.go b/pkg/gallery/export_test.go index fcd90b9e98c..3a6ffa2ec55 100644 --- a/pkg/gallery/export_test.go +++ b/pkg/gallery/export_test.go @@ -3,7 +3,6 @@ package gallery import ( "errors" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/json" "github.com/stashapp/stash/pkg/models/jsonschema" @@ -50,8 +49,8 @@ var ( func createFullGallery(id int) models.Gallery { return models.Gallery{ ID: id, - Files: models.NewRelatedFiles([]file.File{ - &file.BaseFile{ + Files: models.NewRelatedFiles([]models.File{ + &models.BaseFile{ Path: path, }, }), @@ -69,8 +68,8 @@ func createFullGallery(id int) models.Gallery { func createEmptyGallery(id int) models.Gallery { return models.Gallery{ ID: id, - Files: models.NewRelatedFiles([]file.File{ - &file.BaseFile{ + Files: models.NewRelatedFiles([]models.File{ + &models.BaseFile{ Path: path, }, }), diff --git a/pkg/gallery/import.go b/pkg/gallery/import.go index ccb258eb0a1..9c892d3b9a9 100644 --- a/pkg/gallery/import.go +++ b/pkg/gallery/import.go @@ -5,22 +5,25 @@ import ( "fmt" "strings" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/jsonschema" - "github.com/stashapp/stash/pkg/performer" "github.com/stashapp/stash/pkg/sliceutil/stringslice" - "github.com/stashapp/stash/pkg/studio" - "github.com/stashapp/stash/pkg/tag" ) +type ImporterReaderWriter interface { + models.GalleryCreatorUpdater + FindByFileID(ctx 
context.Context, fileID models.FileID) ([]*models.Gallery, error) + FindByFolderID(ctx context.Context, folderID models.FolderID) ([]*models.Gallery, error) + FindUserGalleryByTitle(ctx context.Context, title string) ([]*models.Gallery, error) +} + type Importer struct { - ReaderWriter FullCreatorUpdater - StudioWriter studio.NameFinderCreator - PerformerWriter performer.NameFinderCreator - TagWriter tag.NameFinderCreator - FileFinder file.Getter - FolderFinder file.FolderGetter + ReaderWriter ImporterReaderWriter + StudioWriter models.StudioFinderCreator + PerformerWriter models.PerformerFinderCreator + TagWriter models.TagFinderCreator + FileFinder models.FileFinder + FolderFinder models.FolderFinder Input jsonschema.Gallery MissingRefBehaviour models.ImportMissingRefEnum @@ -28,11 +31,6 @@ type Importer struct { gallery models.Gallery } -type FullCreatorUpdater interface { - FinderCreatorUpdater - Update(ctx context.Context, updatedGallery *models.Gallery) error -} - func (i *Importer) PreImport(ctx context.Context) error { i.gallery = i.galleryJSONToGallery(i.Input) @@ -119,11 +117,10 @@ func (i *Importer) populateStudio(ctx context.Context) error { } func (i *Importer) createStudio(ctx context.Context, name string) (int, error) { - newStudio := &models.Studio{ - Name: name, - } + newStudio := models.NewStudio() + newStudio.Name = name - err := i.StudioWriter.Create(ctx, newStudio) + err := i.StudioWriter.Create(ctx, &newStudio) if err != nil { return 0, err } @@ -179,7 +176,8 @@ func (i *Importer) populatePerformers(ctx context.Context) error { func (i *Importer) createPerformers(ctx context.Context, names []string) ([]*models.Performer, error) { var ret []*models.Performer for _, name := range names { - newPerformer := *models.NewPerformer(name) + newPerformer := models.NewPerformer() + newPerformer.Name = name err := i.PerformerWriter.Create(ctx, &newPerformer) if err != nil { @@ -237,21 +235,22 @@ func (i *Importer) populateTags(ctx context.Context) error { 
func (i *Importer) createTags(ctx context.Context, names []string) ([]*models.Tag, error) { var ret []*models.Tag for _, name := range names { - newTag := models.NewTag(name) + newTag := models.NewTag() + newTag.Name = name - err := i.TagWriter.Create(ctx, newTag) + err := i.TagWriter.Create(ctx, &newTag) if err != nil { return nil, err } - ret = append(ret, newTag) + ret = append(ret, &newTag) } return ret, nil } func (i *Importer) populateFilesFolder(ctx context.Context) error { - files := make([]file.File, 0) + files := make([]models.File, 0) for _, ref := range i.Input.ZipFiles { path := ref @@ -340,7 +339,7 @@ func (i *Importer) FindExistingID(ctx context.Context) (*int, error) { } func (i *Importer) Create(ctx context.Context) (*int, error) { - var fileIDs []file.ID + var fileIDs []models.FileID for _, f := range i.gallery.Files.List() { fileIDs = append(fileIDs, f.Base().ID) } diff --git a/pkg/gallery/import_test.go b/pkg/gallery/import_test.go index bfbdefa9e42..0997b4a57e2 100644 --- a/pkg/gallery/import_test.go +++ b/pkg/gallery/import_test.go @@ -6,7 +6,6 @@ import ( "testing" "time" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/json" "github.com/stashapp/stash/pkg/models/jsonschema" @@ -68,7 +67,7 @@ func TestImporterPreImport(t *testing.T) { Rating: &rating, Organized: organized, URL: url, - Files: models.NewRelatedFiles([]file.File{}), + Files: models.NewRelatedFiles([]models.File{}), TagIDs: models.NewRelatedIDs([]int{}), PerformerIDs: models.NewRelatedIDs([]int{}), CreatedAt: createdAt, diff --git a/pkg/gallery/query.go b/pkg/gallery/query.go index cc2a043d757..da0b5f0c101 100644 --- a/pkg/gallery/query.go +++ b/pkg/gallery/query.go @@ -4,27 +4,10 @@ import ( "context" "strconv" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" ) -type Queryer interface { - Query(ctx context.Context, galleryFilter *models.GalleryFilterType, findFilter 
*models.FindFilterType) ([]*models.Gallery, int, error) -} - -type CountQueryer interface { - QueryCount(ctx context.Context, galleryFilter *models.GalleryFilterType, findFilter *models.FindFilterType) (int, error) -} - -type Finder interface { - FindByPath(ctx context.Context, p string) ([]*models.Gallery, error) - FindUserGalleryByTitle(ctx context.Context, title string) ([]*models.Gallery, error) - FindByFolderID(ctx context.Context, folderID file.FolderID) ([]*models.Gallery, error) - FindByFileID(ctx context.Context, fileID file.ID) ([]*models.Gallery, error) - FindByFingerprints(ctx context.Context, fp []file.Fingerprint) ([]*models.Gallery, error) -} - -func CountByPerformerID(ctx context.Context, r CountQueryer, id int) (int, error) { +func CountByPerformerID(ctx context.Context, r models.GalleryQueryer, id int) (int, error) { filter := &models.GalleryFilterType{ Performers: &models.MultiCriterionInput{ Value: []string{strconv.Itoa(id)}, @@ -35,7 +18,7 @@ func CountByPerformerID(ctx context.Context, r CountQueryer, id int) (int, error return r.QueryCount(ctx, filter, nil) } -func CountByStudioID(ctx context.Context, r CountQueryer, id int, depth *int) (int, error) { +func CountByStudioID(ctx context.Context, r models.GalleryQueryer, id int, depth *int) (int, error) { filter := &models.GalleryFilterType{ Studios: &models.HierarchicalMultiCriterionInput{ Value: []string{strconv.Itoa(id)}, @@ -47,7 +30,7 @@ func CountByStudioID(ctx context.Context, r CountQueryer, id int, depth *int) (i return r.QueryCount(ctx, filter, nil) } -func CountByTagID(ctx context.Context, r CountQueryer, id int, depth *int) (int, error) { +func CountByTagID(ctx context.Context, r models.GalleryQueryer, id int, depth *int) (int, error) { filter := &models.GalleryFilterType{ Tags: &models.HierarchicalMultiCriterionInput{ Value: []string{strconv.Itoa(id)}, diff --git a/pkg/gallery/scan.go b/pkg/gallery/scan.go index 8a35890eea8..f4a9adcc5c5 100644 --- a/pkg/gallery/scan.go +++ 
b/pkg/gallery/scan.go @@ -5,41 +5,41 @@ import ( "fmt" "path/filepath" "strings" - "time" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/plugin" ) -type FinderCreatorUpdater interface { - Finder - Create(ctx context.Context, newGallery *models.Gallery, fileIDs []file.ID) error +type ScanCreatorUpdater interface { + FindByFileID(ctx context.Context, fileID models.FileID) ([]*models.Gallery, error) + FindByFingerprints(ctx context.Context, fp []models.Fingerprint) ([]*models.Gallery, error) + GetFiles(ctx context.Context, relatedID int) ([]models.File, error) + + Create(ctx context.Context, newGallery *models.Gallery, fileIDs []models.FileID) error UpdatePartial(ctx context.Context, id int, updatedGallery models.GalleryPartial) (*models.Gallery, error) - AddFileID(ctx context.Context, id int, fileID file.ID) error - models.FileLoader + AddFileID(ctx context.Context, id int, fileID models.FileID) error } -type SceneFinderUpdater interface { +type ScanSceneFinderUpdater interface { FindByPath(ctx context.Context, p string) ([]*models.Scene, error) Update(ctx context.Context, updatedScene *models.Scene) error AddGalleryIDs(ctx context.Context, sceneID int, galleryIDs []int) error } -type ImageFinderUpdater interface { - FindByZipFileID(ctx context.Context, zipFileID file.ID) ([]*models.Image, error) +type ScanImageFinderUpdater interface { + FindByZipFileID(ctx context.Context, zipFileID models.FileID) ([]*models.Image, error) UpdatePartial(ctx context.Context, id int, partial models.ImagePartial) (*models.Image, error) } type ScanHandler struct { - CreatorUpdater FullCreatorUpdater - SceneFinderUpdater SceneFinderUpdater - ImageFinderUpdater ImageFinderUpdater + CreatorUpdater ScanCreatorUpdater + SceneFinderUpdater ScanSceneFinderUpdater + ImageFinderUpdater ScanImageFinderUpdater PluginCache *plugin.Cache } -func (h *ScanHandler) Handle(ctx context.Context, f 
file.File, oldFile file.File) error { +func (h *ScanHandler) Handle(ctx context.Context, f models.File, oldFile models.File) error { baseFile := f.Base() // try to match the file to a gallery @@ -75,15 +75,11 @@ func (h *ScanHandler) Handle(ctx context.Context, f file.File, oldFile file.File } // create a new gallery - now := time.Now() - newGallery := &models.Gallery{ - CreatedAt: now, - UpdatedAt: now, - } + newGallery := models.NewGallery() logger.Infof("%s doesn't exist. Creating new gallery...", f.Base().Path) - if err := h.CreatorUpdater.Create(ctx, newGallery, []file.ID{baseFile.ID}); err != nil { + if err := h.CreatorUpdater.Create(ctx, &newGallery, []models.FileID{baseFile.ID}); err != nil { return fmt.Errorf("creating new gallery: %w", err) } @@ -91,18 +87,21 @@ func (h *ScanHandler) Handle(ctx context.Context, f file.File, oldFile file.File // associate all the images in the zip file with the gallery for _, i := range images { - if _, err := h.ImageFinderUpdater.UpdatePartial(ctx, i.ID, models.ImagePartial{ + imagePartial := models.ImagePartial{ GalleryIDs: &models.UpdateIDs{ IDs: []int{newGallery.ID}, Mode: models.RelationshipUpdateModeAdd, }, - UpdatedAt: models.NewOptionalTime(now), - }); err != nil { + // set UpdatedAt directly instead of using NewImagePartial, to ensure + // that the images have the same UpdatedAt time as the gallery + UpdatedAt: models.NewOptionalTime(newGallery.UpdatedAt), + } + if _, err := h.ImageFinderUpdater.UpdatePartial(ctx, i.ID, imagePartial); err != nil { return fmt.Errorf("adding image %s to gallery: %w", i.Path, err) } } - existing = []*models.Gallery{newGallery} + existing = []*models.Gallery{&newGallery} } if err := h.associateScene(ctx, existing, f); err != nil { @@ -112,7 +111,7 @@ func (h *ScanHandler) Handle(ctx context.Context, f file.File, oldFile file.File return nil } -func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models.Gallery, f file.File, updateExisting bool) error { +func (h 
*ScanHandler) associateExisting(ctx context.Context, existing []*models.Gallery, f models.File, updateExisting bool) error { for _, i := range existing { if err := i.LoadFiles(ctx, h.CreatorUpdater); err != nil { return err @@ -146,7 +145,7 @@ func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models. return nil } -func (h *ScanHandler) associateScene(ctx context.Context, existing []*models.Gallery, f file.File) error { +func (h *ScanHandler) associateScene(ctx context.Context, existing []*models.Gallery, f models.File) error { galleryIDs := make([]int, len(existing)) for i, g := range existing { galleryIDs[i] = g.ID diff --git a/pkg/gallery/service.go b/pkg/gallery/service.go index 7dfc3857f5d..6db604fc4d6 100644 --- a/pkg/gallery/service.go +++ b/pkg/gallery/service.go @@ -3,50 +3,25 @@ package gallery import ( "context" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/models" ) -type FinderByFile interface { - FindByFileID(ctx context.Context, fileID file.ID) ([]*models.Gallery, error) -} - -type Repository interface { - models.GalleryFinder - FinderByFile - Destroy(ctx context.Context, id int) error - models.FileLoader - ImageUpdater - PartialUpdater -} - -type PartialUpdater interface { - UpdatePartial(ctx context.Context, id int, updatedGallery models.GalleryPartial) (*models.Gallery, error) -} - type ImageFinder interface { - FindByFolderID(ctx context.Context, folder file.FolderID) ([]*models.Image, error) - FindByZipFileID(ctx context.Context, zipFileID file.ID) ([]*models.Image, error) + FindByFolderID(ctx context.Context, folder models.FolderID) ([]*models.Image, error) + FindByZipFileID(ctx context.Context, zipFileID models.FileID) ([]*models.Image, error) models.GalleryIDLoader } type ImageService interface { Destroy(ctx context.Context, i *models.Image, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile bool) error - DestroyZipImages(ctx context.Context, zipFile 
file.File, fileDeleter *image.FileDeleter, deleteGenerated bool) ([]*models.Image, error) -} - -type ChapterRepository interface { - ChapterFinder - ChapterDestroyer - - Update(ctx context.Context, updatedObject models.GalleryChapter) (*models.GalleryChapter, error) + DestroyZipImages(ctx context.Context, zipFile models.File, fileDeleter *image.FileDeleter, deleteGenerated bool) ([]*models.Image, error) } type Service struct { - Repository Repository + Repository models.GalleryReaderWriter ImageFinder ImageFinder ImageService ImageService - File file.Store - Folder file.FolderStore + File models.FileReaderWriter + Folder models.FolderReaderWriter } diff --git a/pkg/gallery/update.go b/pkg/gallery/update.go index 72f479bea99..d66da197c81 100644 --- a/pkg/gallery/update.go +++ b/pkg/gallery/update.go @@ -3,7 +3,6 @@ package gallery import ( "context" "fmt" - "time" "github.com/stashapp/stash/pkg/models" ) @@ -15,9 +14,8 @@ type ImageUpdater interface { } func (s *Service) Updated(ctx context.Context, galleryID int) error { - _, err := s.Repository.UpdatePartial(ctx, galleryID, models.GalleryPartial{ - UpdatedAt: models.NewOptionalTime(time.Now()), - }) + galleryPartial := models.NewGalleryPartial() + _, err := s.Repository.UpdatePartial(ctx, galleryID, galleryPartial) return err } @@ -54,22 +52,22 @@ func (s *Service) RemoveImages(ctx context.Context, g *models.Gallery, toRemove return s.Updated(ctx, g.ID) } -func AddPerformer(ctx context.Context, qb PartialUpdater, o *models.Gallery, performerID int) error { - _, err := qb.UpdatePartial(ctx, o.ID, models.GalleryPartial{ - PerformerIDs: &models.UpdateIDs{ - IDs: []int{performerID}, - Mode: models.RelationshipUpdateModeAdd, - }, - }) +func AddPerformer(ctx context.Context, qb models.GalleryUpdater, o *models.Gallery, performerID int) error { + galleryPartial := models.NewGalleryPartial() + galleryPartial.PerformerIDs = &models.UpdateIDs{ + IDs: []int{performerID}, + Mode: models.RelationshipUpdateModeAdd, + } + _, err 
:= qb.UpdatePartial(ctx, o.ID, galleryPartial) return err } -func AddTag(ctx context.Context, qb PartialUpdater, o *models.Gallery, tagID int) error { - _, err := qb.UpdatePartial(ctx, o.ID, models.GalleryPartial{ - TagIDs: &models.UpdateIDs{ - IDs: []int{tagID}, - Mode: models.RelationshipUpdateModeAdd, - }, - }) +func AddTag(ctx context.Context, qb models.GalleryUpdater, o *models.Gallery, tagID int) error { + galleryPartial := models.NewGalleryPartial() + galleryPartial.TagIDs = &models.UpdateIDs{ + IDs: []int{tagID}, + Mode: models.RelationshipUpdateModeAdd, + } + _, err := qb.UpdatePartial(ctx, o.ID, galleryPartial) return err } diff --git a/pkg/hash/videophash/phash.go b/pkg/hash/videophash/phash.go index 0cbefc2ae41..6e586b17869 100644 --- a/pkg/hash/videophash/phash.go +++ b/pkg/hash/videophash/phash.go @@ -13,8 +13,8 @@ import ( "github.com/stashapp/stash/pkg/ffmpeg" "github.com/stashapp/stash/pkg/ffmpeg/transcoder" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" ) const ( @@ -23,7 +23,7 @@ const ( rows = 5 ) -func Generate(encoder *ffmpeg.FFMpeg, videoFile *file.VideoFile) (*uint64, error) { +func Generate(encoder *ffmpeg.FFMpeg, videoFile *models.VideoFile) (*uint64, error) { sprite, err := generateSprite(encoder, videoFile) if err != nil { return nil, err @@ -76,7 +76,7 @@ func combineImages(images []image.Image) image.Image { return montage } -func generateSprite(encoder *ffmpeg.FFMpeg, videoFile *file.VideoFile) (image.Image, error) { +func generateSprite(encoder *ffmpeg.FFMpeg, videoFile *models.VideoFile) (image.Image, error) { logger.Infof("[generator] generating phash sprite for %s", videoFile.Path) // Generate sprite image offset by 5% on each end to avoid intro/outros diff --git a/pkg/image/delete.go b/pkg/image/delete.go index 78ef4b09ab6..89f4c181153 100644 --- a/pkg/image/delete.go +++ b/pkg/image/delete.go @@ -10,10 +10,6 @@ import ( 
"github.com/stashapp/stash/pkg/models/paths" ) -type Destroyer interface { - Destroy(ctx context.Context, id int) error -} - // FileDeleter is an extension of file.Deleter that handles deletion of image files. type FileDeleter struct { *file.Deleter @@ -45,7 +41,7 @@ func (s *Service) Destroy(ctx context.Context, i *models.Image, fileDeleter *Fil // DestroyZipImages destroys all images in zip, optionally marking the files and generated files for deletion. // Returns a slice of images that were destroyed. -func (s *Service) DestroyZipImages(ctx context.Context, zipFile file.File, fileDeleter *FileDeleter, deleteGenerated bool) ([]*models.Image, error) { +func (s *Service) DestroyZipImages(ctx context.Context, zipFile models.File, fileDeleter *FileDeleter, deleteGenerated bool) ([]*models.Image, error) { var imgsDestroyed []*models.Image imgs, err := s.Repository.FindByZipFileID(ctx, zipFile.Base().ID) diff --git a/pkg/image/export.go b/pkg/image/export.go index d67351e8dfb..41eac446fe2 100644 --- a/pkg/image/export.go +++ b/pkg/image/export.go @@ -6,7 +6,6 @@ import ( "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/json" "github.com/stashapp/stash/pkg/models/jsonschema" - "github.com/stashapp/stash/pkg/studio" ) // ToBasicJSON converts a image object into its JSON object equivalent. 
It @@ -15,7 +14,7 @@ import ( func ToBasicJSON(image *models.Image) *jsonschema.Image { newImageJSON := jsonschema.Image{ Title: image.Title, - URL: image.URL, + URLs: image.URLs.List(), CreatedAt: json.JSONTime{Time: image.CreatedAt}, UpdatedAt: json.JSONTime{Time: image.UpdatedAt}, } @@ -38,22 +37,9 @@ func ToBasicJSON(image *models.Image) *jsonschema.Image { return &newImageJSON } -// func getImageFileJSON(image *models.Image) *jsonschema.ImageFile { -// ret := &jsonschema.ImageFile{} - -// f := image.PrimaryFile() - -// ret.ModTime = json.JSONTime{Time: f.ModTime} -// ret.Size = f.Size -// ret.Width = f.Width -// ret.Height = f.Height - -// return ret -// } - // GetStudioName returns the name of the provided image's studio. It returns an // empty string if there is no studio assigned to the image. -func GetStudioName(ctx context.Context, reader studio.Finder, image *models.Image) (string, error) { +func GetStudioName(ctx context.Context, reader models.StudioGetter, image *models.Image) (string, error) { if image.StudioID != nil { studio, err := reader.Find(ctx, *image.StudioID) if err != nil { diff --git a/pkg/image/export_test.go b/pkg/image/export_test.go index 4c46aae9578..1a5897271ef 100644 --- a/pkg/image/export_test.go +++ b/pkg/image/export_test.go @@ -3,7 +3,6 @@ package image import ( "errors" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/json" "github.com/stashapp/stash/pkg/models/jsonschema" @@ -45,8 +44,8 @@ var ( func createFullImage(id int) models.Image { return models.Image{ ID: id, - Files: models.NewRelatedFiles([]file.File{ - &file.BaseFile{ + Files: models.NewRelatedFiles([]models.File{ + &models.BaseFile{ Path: path, }, }), @@ -54,7 +53,7 @@ func createFullImage(id int) models.Image { OCounter: ocounter, Rating: &rating, Date: &dateObj, - URL: url, + URLs: models.NewRelatedStrings([]string{url}), Organized: organized, CreatedAt: createTime, UpdatedAt: updateTime, @@ -67,7 
+66,7 @@ func createFullJSONImage() *jsonschema.Image { OCounter: ocounter, Rating: rating, Date: date, - URL: url, + URLs: []string{url}, Organized: organized, Files: []string{path}, CreatedAt: json.JSONTime{ diff --git a/pkg/image/import.go b/pkg/image/import.go index 3c1e7ac8b53..8b90fa8a7c6 100644 --- a/pkg/image/import.go +++ b/pkg/image/import.go @@ -5,13 +5,9 @@ import ( "fmt" "strings" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/jsonschema" - "github.com/stashapp/stash/pkg/performer" "github.com/stashapp/stash/pkg/sliceutil/stringslice" - "github.com/stashapp/stash/pkg/studio" - "github.com/stashapp/stash/pkg/tag" ) type GalleryFinder interface { @@ -19,18 +15,18 @@ type GalleryFinder interface { FindUserGalleryByTitle(ctx context.Context, title string) ([]*models.Gallery, error) } -type FullCreatorUpdater interface { - FinderCreatorUpdater - Update(ctx context.Context, updatedImage *models.Image) error +type ImporterReaderWriter interface { + models.ImageCreatorUpdater + FindByFileID(ctx context.Context, fileID models.FileID) ([]*models.Image, error) } type Importer struct { - ReaderWriter FullCreatorUpdater - FileFinder file.Getter - StudioWriter studio.NameFinderCreator + ReaderWriter ImporterReaderWriter + FileFinder models.FileFinder + StudioWriter models.StudioFinderCreator GalleryFinder GalleryFinder - PerformerWriter performer.NameFinderCreator - TagWriter tag.NameFinderCreator + PerformerWriter models.PerformerFinderCreator + TagWriter models.TagFinderCreator Input jsonschema.Image MissingRefBehaviour models.ImportMissingRefEnum @@ -66,8 +62,6 @@ func (i *Importer) PreImport(ctx context.Context) error { func (i *Importer) imageJSONToImage(imageJSON jsonschema.Image) models.Image { newImage := models.Image{ - // Checksum: imageJSON.Checksum, - // Path: i.Path, PerformerIDs: models.NewRelatedIDs([]int{}), TagIDs: models.NewRelatedIDs([]int{}), GalleryIDs: 
models.NewRelatedIDs([]int{}), @@ -85,9 +79,12 @@ func (i *Importer) imageJSONToImage(imageJSON jsonschema.Image) models.Image { if imageJSON.Rating != 0 { newImage.Rating = &imageJSON.Rating } - if imageJSON.URL != "" { - newImage.URL = imageJSON.URL + if len(imageJSON.URLs) > 0 { + newImage.URLs = models.NewRelatedStrings(imageJSON.URLs) + } else if imageJSON.URL != "" { + newImage.URLs = models.NewRelatedStrings([]string{imageJSON.URL}) } + if imageJSON.Date != "" { d, err := models.ParseDate(imageJSON.Date) if err == nil { @@ -99,7 +96,7 @@ func (i *Importer) imageJSONToImage(imageJSON jsonschema.Image) models.Image { } func (i *Importer) populateFiles(ctx context.Context) error { - files := make([]file.File, 0) + files := make([]models.File, 0) for _, ref := range i.Input.Files { path := ref @@ -152,11 +149,10 @@ func (i *Importer) populateStudio(ctx context.Context) error { } func (i *Importer) createStudio(ctx context.Context, name string) (int, error) { - newStudio := &models.Studio{ - Name: name, - } + newStudio := models.NewStudio() + newStudio.Name = name - err := i.StudioWriter.Create(ctx, newStudio) + err := i.StudioWriter.Create(ctx, &newStudio) if err != nil { return 0, err } @@ -265,7 +261,8 @@ func (i *Importer) populatePerformers(ctx context.Context) error { func (i *Importer) createPerformers(ctx context.Context, names []string) ([]*models.Performer, error) { var ret []*models.Performer for _, name := range names { - newPerformer := *models.NewPerformer(name) + newPerformer := models.NewPerformer() + newPerformer.Name = name err := i.PerformerWriter.Create(ctx, &newPerformer) if err != nil { @@ -330,15 +327,12 @@ func (i *Importer) FindExistingID(ctx context.Context) (*int, error) { } func (i *Importer) Create(ctx context.Context) (*int, error) { - var fileIDs []file.ID + var fileIDs []models.FileID for _, f := range i.image.Files.List() { fileIDs = append(fileIDs, f.Base().ID) } - err := i.ReaderWriter.Create(ctx, &models.ImageCreateInput{ - 
Image: &i.image, - FileIDs: fileIDs, - }) + err := i.ReaderWriter.Create(ctx, &i.image, fileIDs) if err != nil { return nil, fmt.Errorf("error creating image: %v", err) } @@ -360,7 +354,7 @@ func (i *Importer) Update(ctx context.Context, id int) error { return nil } -func importTags(ctx context.Context, tagWriter tag.NameFinderCreator, names []string, missingRefBehaviour models.ImportMissingRefEnum) ([]*models.Tag, error) { +func importTags(ctx context.Context, tagWriter models.TagFinderCreator, names []string, missingRefBehaviour models.ImportMissingRefEnum) ([]*models.Tag, error) { tags, err := tagWriter.FindByNames(ctx, names, false) if err != nil { return nil, err @@ -395,17 +389,18 @@ func importTags(ctx context.Context, tagWriter tag.NameFinderCreator, names []st return tags, nil } -func createTags(ctx context.Context, tagWriter tag.NameFinderCreator, names []string) ([]*models.Tag, error) { +func createTags(ctx context.Context, tagWriter models.TagCreator, names []string) ([]*models.Tag, error) { var ret []*models.Tag for _, name := range names { - newTag := models.NewTag(name) + newTag := models.NewTag() + newTag.Name = name - err := tagWriter.Create(ctx, newTag) + err := tagWriter.Create(ctx, &newTag) if err != nil { return nil, err } - ret = append(ret, newTag) + ret = append(ret, &newTag) } return ret, nil diff --git a/pkg/image/query.go b/pkg/image/query.go index 85d1df05c25..a5c9a17322f 100644 --- a/pkg/image/query.go +++ b/pkg/image/query.go @@ -7,14 +7,6 @@ import ( "github.com/stashapp/stash/pkg/models" ) -type Queryer interface { - Query(ctx context.Context, options models.ImageQueryOptions) (*models.ImageQueryResult, error) -} - -type CountQueryer interface { - QueryCount(ctx context.Context, imageFilter *models.ImageFilterType, findFilter *models.FindFilterType) (int, error) -} - // QueryOptions returns a ImageQueryResult populated with the provided filters. 
func QueryOptions(imageFilter *models.ImageFilterType, findFilter *models.FindFilterType, count bool) models.ImageQueryOptions { return models.ImageQueryOptions{ @@ -27,7 +19,7 @@ func QueryOptions(imageFilter *models.ImageFilterType, findFilter *models.FindFi } // Query queries for images using the provided filters. -func Query(ctx context.Context, qb Queryer, imageFilter *models.ImageFilterType, findFilter *models.FindFilterType) ([]*models.Image, error) { +func Query(ctx context.Context, qb models.ImageQueryer, imageFilter *models.ImageFilterType, findFilter *models.FindFilterType) ([]*models.Image, error) { result, err := qb.Query(ctx, QueryOptions(imageFilter, findFilter, false)) if err != nil { return nil, err @@ -41,7 +33,7 @@ func Query(ctx context.Context, qb Queryer, imageFilter *models.ImageFilterType, return images, nil } -func CountByPerformerID(ctx context.Context, r CountQueryer, id int) (int, error) { +func CountByPerformerID(ctx context.Context, r models.ImageQueryer, id int) (int, error) { filter := &models.ImageFilterType{ Performers: &models.MultiCriterionInput{ Value: []string{strconv.Itoa(id)}, @@ -52,7 +44,7 @@ func CountByPerformerID(ctx context.Context, r CountQueryer, id int) (int, error return r.QueryCount(ctx, filter, nil) } -func CountByStudioID(ctx context.Context, r CountQueryer, id int, depth *int) (int, error) { +func CountByStudioID(ctx context.Context, r models.ImageQueryer, id int, depth *int) (int, error) { filter := &models.ImageFilterType{ Studios: &models.HierarchicalMultiCriterionInput{ Value: []string{strconv.Itoa(id)}, @@ -64,7 +56,7 @@ func CountByStudioID(ctx context.Context, r CountQueryer, id int, depth *int) (i return r.QueryCount(ctx, filter, nil) } -func CountByTagID(ctx context.Context, r CountQueryer, id int, depth *int) (int, error) { +func CountByTagID(ctx context.Context, r models.ImageQueryer, id int, depth *int) (int, error) { filter := &models.ImageFilterType{ Tags: &models.HierarchicalMultiCriterionInput{ 
Value: []string{strconv.Itoa(id)}, @@ -76,7 +68,7 @@ func CountByTagID(ctx context.Context, r CountQueryer, id int, depth *int) (int, return r.QueryCount(ctx, filter, nil) } -func FindByGalleryID(ctx context.Context, r Queryer, galleryID int, sortBy string, sortDir models.SortDirectionEnum) ([]*models.Image, error) { +func FindByGalleryID(ctx context.Context, r models.ImageQueryer, galleryID int, sortBy string, sortDir models.SortDirectionEnum) ([]*models.Image, error) { perPage := -1 findFilter := models.FindFilterType{ @@ -99,7 +91,7 @@ func FindByGalleryID(ctx context.Context, r Queryer, galleryID int, sortBy strin }, &findFilter) } -func FindGalleryCover(ctx context.Context, r Queryer, galleryID int, galleryCoverRegex string) (*models.Image, error) { +func FindGalleryCover(ctx context.Context, r models.ImageQueryer, galleryID int, galleryCoverRegex string) (*models.Image, error) { const useCoverJpg = true img, err := findGalleryCover(ctx, r, galleryID, useCoverJpg, galleryCoverRegex) if err != nil { @@ -114,7 +106,7 @@ func FindGalleryCover(ctx context.Context, r Queryer, galleryID int, galleryCove return findGalleryCover(ctx, r, galleryID, !useCoverJpg, galleryCoverRegex) } -func findGalleryCover(ctx context.Context, r Queryer, galleryID int, useCoverJpg bool, galleryCoverRegex string) (*models.Image, error) { +func findGalleryCover(ctx context.Context, r models.ImageQueryer, galleryID int, useCoverJpg bool, galleryCoverRegex string) (*models.Image, error) { // try to find cover.jpg in the gallery perPage := 1 sortBy := "path" diff --git a/pkg/image/scan.go b/pkg/image/scan.go index d28d94a86c0..9f4aa0d57e9 100644 --- a/pkg/image/scan.go +++ b/pkg/image/scan.go @@ -6,9 +6,7 @@ import ( "fmt" "os" "path/filepath" - "time" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/paths" @@ -21,21 +19,22 @@ var ( ErrNotImageFile = errors.New("not an image file") ) 
-type FinderCreatorUpdater interface { - FindByFileID(ctx context.Context, fileID file.ID) ([]*models.Image, error) - FindByFolderID(ctx context.Context, folderID file.FolderID) ([]*models.Image, error) - FindByFingerprints(ctx context.Context, fp []file.Fingerprint) ([]*models.Image, error) - Create(ctx context.Context, newImage *models.ImageCreateInput) error +type ScanCreatorUpdater interface { + FindByFileID(ctx context.Context, fileID models.FileID) ([]*models.Image, error) + FindByFolderID(ctx context.Context, folderID models.FolderID) ([]*models.Image, error) + FindByFingerprints(ctx context.Context, fp []models.Fingerprint) ([]*models.Image, error) + GetFiles(ctx context.Context, relatedID int) ([]models.File, error) + GetGalleryIDs(ctx context.Context, relatedID int) ([]int, error) + + Create(ctx context.Context, newImage *models.Image, fileIDs []models.FileID) error UpdatePartial(ctx context.Context, id int, updatedImage models.ImagePartial) (*models.Image, error) - AddFileID(ctx context.Context, id int, fileID file.ID) error - models.GalleryIDLoader - models.FileLoader + AddFileID(ctx context.Context, id int, fileID models.FileID) error } type GalleryFinderCreator interface { - FindByFileID(ctx context.Context, fileID file.ID) ([]*models.Gallery, error) - FindByFolderID(ctx context.Context, folderID file.FolderID) ([]*models.Gallery, error) - Create(ctx context.Context, newObject *models.Gallery, fileIDs []file.ID) error + FindByFileID(ctx context.Context, fileID models.FileID) ([]*models.Gallery, error) + FindByFolderID(ctx context.Context, folderID models.FolderID) ([]*models.Gallery, error) + Create(ctx context.Context, newObject *models.Gallery, fileIDs []models.FileID) error UpdatePartial(ctx context.Context, id int, updatedGallery models.GalleryPartial) (*models.Gallery, error) } @@ -44,11 +43,11 @@ type ScanConfig interface { } type ScanGenerator interface { - Generate(ctx context.Context, i *models.Image, f file.File) error + Generate(ctx 
context.Context, i *models.Image, f models.File) error } type ScanHandler struct { - CreatorUpdater FinderCreatorUpdater + CreatorUpdater ScanCreatorUpdater GalleryFinder GalleryFinderCreator ScanGenerator ScanGenerator @@ -80,7 +79,7 @@ func (h *ScanHandler) validate() error { return nil } -func (h *ScanHandler) Handle(ctx context.Context, f file.File, oldFile file.File) error { +func (h *ScanHandler) Handle(ctx context.Context, f models.File, oldFile models.File) error { if err := h.validate(); err != nil { return err } @@ -109,16 +108,12 @@ func (h *ScanHandler) Handle(ctx context.Context, f file.File, oldFile file.File } } else { // create a new image - now := time.Now() - newImage := &models.Image{ - CreatedAt: now, - UpdatedAt: now, - GalleryIDs: models.NewRelatedIDs([]int{}), - } + newImage := models.NewImage() + newImage.GalleryIDs = models.NewRelatedIDs([]int{}) logger.Infof("%s doesn't exist. Creating new image...", f.Base().Path) - g, err := h.getGalleryToAssociate(ctx, newImage, f) + g, err := h.getGalleryToAssociate(ctx, &newImage, f) if err != nil { return err } @@ -128,31 +123,29 @@ func (h *ScanHandler) Handle(ctx context.Context, f file.File, oldFile file.File logger.Infof("Adding %s to gallery %s", f.Base().Path, g.Path) } - if err := h.CreatorUpdater.Create(ctx, &models.ImageCreateInput{ - Image: newImage, - FileIDs: []file.ID{imageFile.ID}, - }); err != nil { + if err := h.CreatorUpdater.Create(ctx, &newImage, []models.FileID{imageFile.ID}); err != nil { return fmt.Errorf("creating new image: %w", err) } // update the gallery updated at timestamp if applicable if g != nil { - if _, err := h.GalleryFinder.UpdatePartial(ctx, g.ID, models.GalleryPartial{ - UpdatedAt: models.NewOptionalTime(time.Now()), - }); err != nil { + galleryPartial := models.GalleryPartial{ + UpdatedAt: models.NewOptionalTime(newImage.UpdatedAt), + } + if _, err := h.GalleryFinder.UpdatePartial(ctx, g.ID, galleryPartial); err != nil { return fmt.Errorf("updating gallery 
updated at timestamp: %w", err) } } h.PluginCache.RegisterPostHooks(ctx, newImage.ID, plugin.ImageCreatePost, nil, nil) - existing = []*models.Image{newImage} + existing = []*models.Image{&newImage} } // remove the old thumbnail if the checksum changed - we'll regenerate it if oldFile != nil { - oldHash := oldFile.Base().Fingerprints.GetString(file.FingerprintTypeMD5) - newHash := f.Base().Fingerprints.GetString(file.FingerprintTypeMD5) + oldHash := oldFile.Base().Fingerprints.GetString(models.FingerprintTypeMD5) + newHash := f.Base().Fingerprints.GetString(models.FingerprintTypeMD5) if oldHash != "" && newHash != "" && oldHash != newHash { // remove cache dir of gallery @@ -173,7 +166,7 @@ func (h *ScanHandler) Handle(ctx context.Context, f file.File, oldFile file.File return nil } -func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models.Image, f *file.BaseFile, updateExisting bool) error { +func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models.Image, f *models.BaseFile, updateExisting bool) error { for _, i := range existing { if err := i.LoadFiles(ctx, h.CreatorUpdater); err != nil { return err @@ -215,17 +208,20 @@ func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models. 
if changed { // always update updated_at time - if _, err := h.CreatorUpdater.UpdatePartial(ctx, i.ID, models.ImagePartial{ - GalleryIDs: galleryIDs, - UpdatedAt: models.NewOptionalTime(time.Now()), - }); err != nil { + imagePartial := models.NewImagePartial() + imagePartial.GalleryIDs = galleryIDs + + if _, err := h.CreatorUpdater.UpdatePartial(ctx, i.ID, imagePartial); err != nil { return fmt.Errorf("updating image: %w", err) } if g != nil { - if _, err := h.GalleryFinder.UpdatePartial(ctx, g.ID, models.GalleryPartial{ - UpdatedAt: models.NewOptionalTime(time.Now()), - }); err != nil { + galleryPartial := models.GalleryPartial{ + // set UpdatedAt directly instead of using NewGalleryPartial, to ensure + // that the linked gallery has the same UpdatedAt time as this image + UpdatedAt: imagePartial.UpdatedAt, + } + if _, err := h.GalleryFinder.UpdatePartial(ctx, g.ID, galleryPartial); err != nil { return fmt.Errorf("updating gallery updated at timestamp: %w", err) } } @@ -239,7 +235,7 @@ func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models. return nil } -func (h *ScanHandler) getOrCreateFolderBasedGallery(ctx context.Context, f file.File) (*models.Gallery, error) { +func (h *ScanHandler) getOrCreateFolderBasedGallery(ctx context.Context, f models.File) (*models.Gallery, error) { folderID := f.Base().ParentFolderID g, err := h.GalleryFinder.FindByFolderID(ctx, folderID) if err != nil { @@ -252,16 +248,12 @@ func (h *ScanHandler) getOrCreateFolderBasedGallery(ctx context.Context, f file. 
} // create a new folder-based gallery - now := time.Now() - newGallery := &models.Gallery{ - FolderID: &folderID, - CreatedAt: now, - UpdatedAt: now, - } + newGallery := models.NewGallery() + newGallery.FolderID = &folderID logger.Infof("Creating folder-based gallery for %s", filepath.Dir(f.Base().Path)) - if err := h.GalleryFinder.Create(ctx, newGallery, nil); err != nil { + if err := h.GalleryFinder.Create(ctx, &newGallery, nil); err != nil { return nil, fmt.Errorf("creating folder based gallery: %w", err) } @@ -269,11 +261,11 @@ func (h *ScanHandler) getOrCreateFolderBasedGallery(ctx context.Context, f file. // it's possible that there are other images in the folder that // need to be added to the new gallery. Find and add them now. - if err := h.associateFolderImages(ctx, newGallery); err != nil { + if err := h.associateFolderImages(ctx, &newGallery); err != nil { return nil, fmt.Errorf("associating existing folder images: %w", err) } - return newGallery, nil + return &newGallery, nil } func (h *ScanHandler) associateFolderImages(ctx context.Context, g *models.Gallery) error { @@ -285,13 +277,13 @@ func (h *ScanHandler) associateFolderImages(ctx context.Context, g *models.Galle for _, ii := range i { logger.Infof("Adding %s to gallery %s", ii.Path, g.Path) - if _, err := h.CreatorUpdater.UpdatePartial(ctx, ii.ID, models.ImagePartial{ - GalleryIDs: &models.UpdateIDs{ - IDs: []int{g.ID}, - Mode: models.RelationshipUpdateModeAdd, - }, - UpdatedAt: models.NewOptionalTime(time.Now()), - }); err != nil { + imagePartial := models.NewImagePartial() + imagePartial.GalleryIDs = &models.UpdateIDs{ + IDs: []int{g.ID}, + Mode: models.RelationshipUpdateModeAdd, + } + + if _, err := h.CreatorUpdater.UpdatePartial(ctx, ii.ID, imagePartial); err != nil { return fmt.Errorf("updating image: %w", err) } } @@ -299,7 +291,7 @@ func (h *ScanHandler) associateFolderImages(ctx context.Context, g *models.Galle return nil } -func (h *ScanHandler) getOrCreateZipBasedGallery(ctx 
context.Context, zipFile file.File) (*models.Gallery, error) { +func (h *ScanHandler) getOrCreateZipBasedGallery(ctx context.Context, zipFile models.File) (*models.Gallery, error) { g, err := h.GalleryFinder.FindByFileID(ctx, zipFile.Base().ID) if err != nil { return nil, fmt.Errorf("finding zip based gallery: %w", err) @@ -311,24 +303,20 @@ func (h *ScanHandler) getOrCreateZipBasedGallery(ctx context.Context, zipFile fi } // create a new zip-based gallery - now := time.Now() - newGallery := &models.Gallery{ - CreatedAt: now, - UpdatedAt: now, - } + newGallery := models.NewGallery() logger.Infof("%s doesn't exist. Creating new gallery...", zipFile.Base().Path) - if err := h.GalleryFinder.Create(ctx, newGallery, []file.ID{zipFile.Base().ID}); err != nil { + if err := h.GalleryFinder.Create(ctx, &newGallery, []models.FileID{zipFile.Base().ID}); err != nil { return nil, fmt.Errorf("creating zip-based gallery: %w", err) } h.PluginCache.RegisterPostHooks(ctx, newGallery.ID, plugin.GalleryCreatePost, nil, nil) - return newGallery, nil + return &newGallery, nil } -func (h *ScanHandler) getOrCreateGallery(ctx context.Context, f file.File) (*models.Gallery, error) { +func (h *ScanHandler) getOrCreateGallery(ctx context.Context, f models.File) (*models.Gallery, error) { // don't create folder-based galleries for files in zip file if f.Base().ZipFile != nil { return h.getOrCreateZipBasedGallery(ctx, f.Base().ZipFile) @@ -357,7 +345,7 @@ func (h *ScanHandler) getOrCreateGallery(ctx context.Context, f file.File) (*mod return nil, nil } -func (h *ScanHandler) getGalleryToAssociate(ctx context.Context, newImage *models.Image, f file.File) (*models.Gallery, error) { +func (h *ScanHandler) getGalleryToAssociate(ctx context.Context, newImage *models.Image, f models.File) (*models.Gallery, error) { g, err := h.getOrCreateGallery(ctx, f) if err != nil { return nil, err diff --git a/pkg/image/service.go b/pkg/image/service.go index 5aacc4e59c2..55dc7686d1c 100644 --- 
a/pkg/image/service.go +++ b/pkg/image/service.go @@ -1,24 +1,10 @@ package image import ( - "context" - - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" ) -type FinderByFile interface { - FindByFileID(ctx context.Context, fileID file.ID) ([]*models.Image, error) - FindByZipFileID(ctx context.Context, zipFileID file.ID) ([]*models.Image, error) -} - -type Repository interface { - FinderByFile - Destroyer - models.FileLoader -} - type Service struct { - File file.Store - Repository Repository + File models.FileReaderWriter + Repository models.ImageReaderWriter } diff --git a/pkg/image/thumbnail.go b/pkg/image/thumbnail.go index dc07b0f5537..16191fa55bb 100644 --- a/pkg/image/thumbnail.go +++ b/pkg/image/thumbnail.go @@ -14,6 +14,7 @@ import ( "github.com/stashapp/stash/pkg/ffmpeg/transcoder" "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/fsutil" + "github.com/stashapp/stash/pkg/models" ) const ffmpegImageQuality = 5 @@ -68,7 +69,7 @@ func NewThumbnailEncoder(ffmpegEncoder *ffmpeg.FFMpeg, ffProbe ffmpeg.FFProbe, c // the provided max size. It resizes based on the largest X/Y direction. // It returns nil and an error if an error occurs reading, decoding or encoding // the image, or if the image is not suitable for thumbnails. 
-func (e *ThumbnailEncoder) GetThumbnail(f file.File, maxSize int) ([]byte, error) { +func (e *ThumbnailEncoder) GetThumbnail(f models.File, maxSize int) ([]byte, error) { reader, err := f.Open(&file.OsFS{}) if err != nil { return nil, err @@ -82,7 +83,7 @@ func (e *ThumbnailEncoder) GetThumbnail(f file.File, maxSize int) ([]byte, error data := buf.Bytes() - if imageFile, ok := f.(*file.ImageFile); ok { + if imageFile, ok := f.(*models.ImageFile); ok { format := imageFile.Format animated := imageFile.Format == formatGif @@ -98,7 +99,7 @@ func (e *ThumbnailEncoder) GetThumbnail(f file.File, maxSize int) ([]byte, error } // Videofiles can only be thumbnailed with ffmpeg - if _, ok := f.(*file.VideoFile); ok { + if _, ok := f.(*models.VideoFile); ok { return e.ffmpegImageThumbnail(buf, maxSize) } diff --git a/pkg/image/update.go b/pkg/image/update.go index 3a173b7ad3d..844e2088f71 100644 --- a/pkg/image/update.go +++ b/pkg/image/update.go @@ -6,27 +6,22 @@ import ( "github.com/stashapp/stash/pkg/models" ) -type PartialUpdater interface { - UpdatePartial(ctx context.Context, id int, partial models.ImagePartial) (*models.Image, error) -} - -func AddPerformer(ctx context.Context, qb PartialUpdater, i *models.Image, performerID int) error { - _, err := qb.UpdatePartial(ctx, i.ID, models.ImagePartial{ - PerformerIDs: &models.UpdateIDs{ - IDs: []int{performerID}, - Mode: models.RelationshipUpdateModeAdd, - }, - }) - +func AddPerformer(ctx context.Context, qb models.ImageUpdater, i *models.Image, performerID int) error { + imagePartial := models.NewImagePartial() + imagePartial.PerformerIDs = &models.UpdateIDs{ + IDs: []int{performerID}, + Mode: models.RelationshipUpdateModeAdd, + } + _, err := qb.UpdatePartial(ctx, i.ID, imagePartial) return err } -func AddTag(ctx context.Context, qb PartialUpdater, i *models.Image, tagID int) error { - _, err := qb.UpdatePartial(ctx, i.ID, models.ImagePartial{ - TagIDs: &models.UpdateIDs{ - IDs: []int{tagID}, - Mode: 
models.RelationshipUpdateModeAdd, - }, - }) +func AddTag(ctx context.Context, qb models.ImageUpdater, i *models.Image, tagID int) error { + imagePartial := models.NewImagePartial() + imagePartial.TagIDs = &models.UpdateIDs{ + IDs: []int{tagID}, + Mode: models.RelationshipUpdateModeAdd, + } + _, err := qb.UpdatePartial(ctx, i.ID, imagePartial) return err } diff --git a/pkg/match/cache.go b/pkg/match/cache.go index 06237c7f6ee..6946f65dbbc 100644 --- a/pkg/match/cache.go +++ b/pkg/match/cache.go @@ -20,7 +20,7 @@ type Cache struct { // against. This means that performers with single-letter words in their names could potentially // be missed. // This query is expensive, so it's queried once and cached, if the cache if provided. -func getSingleLetterPerformers(ctx context.Context, c *Cache, reader PerformerAutoTagQueryer) ([]*models.Performer, error) { +func getSingleLetterPerformers(ctx context.Context, c *Cache, reader models.PerformerAutoTagQueryer) ([]*models.Performer, error) { if c == nil { c = &Cache{} } @@ -53,7 +53,7 @@ func getSingleLetterPerformers(ctx context.Context, c *Cache, reader PerformerAu // getSingleLetterStudios returns all studios with names that start with single character words. // See getSingleLetterPerformers for details. -func getSingleLetterStudios(ctx context.Context, c *Cache, reader StudioAutoTagQueryer) ([]*models.Studio, error) { +func getSingleLetterStudios(ctx context.Context, c *Cache, reader models.StudioAutoTagQueryer) ([]*models.Studio, error) { if c == nil { c = &Cache{} } @@ -86,7 +86,7 @@ func getSingleLetterStudios(ctx context.Context, c *Cache, reader StudioAutoTagQ // getSingleLetterTags returns all tags with names that start with single character words. // See getSingleLetterPerformers for details. 
-func getSingleLetterTags(ctx context.Context, c *Cache, reader TagAutoTagQueryer) ([]*models.Tag, error) { +func getSingleLetterTags(ctx context.Context, c *Cache, reader models.TagAutoTagQueryer) ([]*models.Tag, error) { if c == nil { c = &Cache{} } diff --git a/pkg/match/path.go b/pkg/match/path.go index 666d643747a..0b5aaa6ba48 100644 --- a/pkg/match/path.go +++ b/pkg/match/path.go @@ -14,8 +14,6 @@ import ( "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/scene" "github.com/stashapp/stash/pkg/sliceutil/stringslice" - "github.com/stashapp/stash/pkg/studio" - "github.com/stashapp/stash/pkg/tag" ) const ( @@ -28,24 +26,6 @@ const ( var separatorRE = regexp.MustCompile(separatorPattern) -type PerformerAutoTagQueryer interface { - Query(ctx context.Context, performerFilter *models.PerformerFilterType, findFilter *models.FindFilterType) ([]*models.Performer, int, error) - QueryForAutoTag(ctx context.Context, words []string) ([]*models.Performer, error) - models.AliasLoader -} - -type StudioAutoTagQueryer interface { - QueryForAutoTag(ctx context.Context, words []string) ([]*models.Studio, error) - studio.Queryer - GetAliases(ctx context.Context, studioID int) ([]string, error) -} - -type TagAutoTagQueryer interface { - QueryForAutoTag(ctx context.Context, words []string) ([]*models.Tag, error) - tag.Queryer - GetAliases(ctx context.Context, tagID int) ([]string, error) -} - func getPathQueryRegex(name string) string { // escape specific regex characters name = regexp.QuoteMeta(name) @@ -146,7 +126,7 @@ func regexpMatchesPath(r *regexp.Regexp, path string) int { return found[len(found)-1][0] } -func getPerformers(ctx context.Context, words []string, performerReader PerformerAutoTagQueryer, cache *Cache) ([]*models.Performer, error) { +func getPerformers(ctx context.Context, words []string, performerReader models.PerformerAutoTagQueryer, cache *Cache) ([]*models.Performer, error) { performers, err := performerReader.QueryForAutoTag(ctx, words) if 
err != nil { return nil, err @@ -160,7 +140,7 @@ func getPerformers(ctx context.Context, words []string, performerReader Performe return append(performers, swPerformers...), nil } -func PathToPerformers(ctx context.Context, path string, reader PerformerAutoTagQueryer, cache *Cache, trimExt bool) ([]*models.Performer, error) { +func PathToPerformers(ctx context.Context, path string, reader models.PerformerAutoTagQueryer, cache *Cache, trimExt bool) ([]*models.Performer, error) { words := getPathWords(path, trimExt) performers, err := getPerformers(ctx, words, reader, cache) @@ -198,7 +178,7 @@ func PathToPerformers(ctx context.Context, path string, reader PerformerAutoTagQ return ret, nil } -func getStudios(ctx context.Context, words []string, reader StudioAutoTagQueryer, cache *Cache) ([]*models.Studio, error) { +func getStudios(ctx context.Context, words []string, reader models.StudioAutoTagQueryer, cache *Cache) ([]*models.Studio, error) { studios, err := reader.QueryForAutoTag(ctx, words) if err != nil { return nil, err @@ -215,7 +195,7 @@ func getStudios(ctx context.Context, words []string, reader StudioAutoTagQueryer // PathToStudio returns the Studio that matches the given path. // Where multiple matching studios are found, the one that matches the latest // position in the path is returned. 
-func PathToStudio(ctx context.Context, path string, reader StudioAutoTagQueryer, cache *Cache, trimExt bool) (*models.Studio, error) { +func PathToStudio(ctx context.Context, path string, reader models.StudioAutoTagQueryer, cache *Cache, trimExt bool) (*models.Studio, error) { words := getPathWords(path, trimExt) candidates, err := getStudios(ctx, words, reader, cache) @@ -249,7 +229,7 @@ func PathToStudio(ctx context.Context, path string, reader StudioAutoTagQueryer, return ret, nil } -func getTags(ctx context.Context, words []string, reader TagAutoTagQueryer, cache *Cache) ([]*models.Tag, error) { +func getTags(ctx context.Context, words []string, reader models.TagAutoTagQueryer, cache *Cache) ([]*models.Tag, error) { tags, err := reader.QueryForAutoTag(ctx, words) if err != nil { return nil, err @@ -263,7 +243,7 @@ func getTags(ctx context.Context, words []string, reader TagAutoTagQueryer, cach return append(tags, swTags...), nil } -func PathToTags(ctx context.Context, path string, reader TagAutoTagQueryer, cache *Cache, trimExt bool) ([]*models.Tag, error) { +func PathToTags(ctx context.Context, path string, reader models.TagAutoTagQueryer, cache *Cache, trimExt bool) ([]*models.Tag, error) { words := getPathWords(path, trimExt) tags, err := getTags(ctx, words, reader, cache) @@ -299,7 +279,7 @@ func PathToTags(ctx context.Context, path string, reader TagAutoTagQueryer, cach return ret, nil } -func PathToScenesFn(ctx context.Context, name string, paths []string, sceneReader scene.Queryer, fn func(ctx context.Context, scene *models.Scene) error) error { +func PathToScenesFn(ctx context.Context, name string, paths []string, sceneReader models.SceneQueryer, fn func(ctx context.Context, scene *models.Scene) error) error { regex := getPathQueryRegex(name) organized := false filter := models.SceneFilterType{ @@ -358,7 +338,7 @@ func PathToScenesFn(ctx context.Context, name string, paths []string, sceneReade return nil } -func PathToImagesFn(ctx context.Context, name 
string, paths []string, imageReader image.Queryer, fn func(ctx context.Context, scene *models.Image) error) error { +func PathToImagesFn(ctx context.Context, name string, paths []string, imageReader models.ImageQueryer, fn func(ctx context.Context, scene *models.Image) error) error { regex := getPathQueryRegex(name) organized := false filter := models.ImageFilterType{ @@ -417,7 +397,7 @@ func PathToImagesFn(ctx context.Context, name string, paths []string, imageReade return nil } -func PathToGalleriesFn(ctx context.Context, name string, paths []string, galleryReader gallery.Queryer, fn func(ctx context.Context, scene *models.Gallery) error) error { +func PathToGalleriesFn(ctx context.Context, name string, paths []string, galleryReader models.GalleryQueryer, fn func(ctx context.Context, scene *models.Gallery) error) error { regex := getPathQueryRegex(name) organized := false filter := models.GalleryFilterType{ diff --git a/pkg/match/scraped.go b/pkg/match/scraped.go index d1182a32963..675a8d7fcdd 100644 --- a/pkg/match/scraped.go +++ b/pkg/match/scraped.go @@ -58,7 +58,7 @@ func ScrapedPerformer(ctx context.Context, qb PerformerFinder, p *models.Scraped } type StudioFinder interface { - studio.Queryer + models.StudioQueryer FindByStashID(ctx context.Context, stashID models.StashID) ([]*models.Studio, error) } @@ -134,7 +134,7 @@ func ScrapedMovie(ctx context.Context, qb MovieNamesFinder, m *models.ScrapedMov // ScrapedTag matches the provided tag with the tags // in the database and sets the ID field if one is found. 
-func ScrapedTag(ctx context.Context, qb tag.Queryer, s *models.ScrapedTag) error { +func ScrapedTag(ctx context.Context, qb models.TagQueryer, s *models.ScrapedTag) error { if s.StoredID != nil { return nil } diff --git a/pkg/models/file.go b/pkg/models/file.go index 827a55d5ca4..e6ce41d1e97 100644 --- a/pkg/models/file.go +++ b/pkg/models/file.go @@ -4,8 +4,6 @@ import ( "context" "path/filepath" "strings" - - "github.com/stashapp/stash/pkg/file" ) type FileQueryOptions struct { @@ -57,24 +55,24 @@ func PathsFileFilter(paths []string) *FileFilterType { type FileQueryResult struct { // can't use QueryResult because id type is wrong - IDs []file.ID + IDs []FileID Count int - finder file.Finder - files []file.File + getter FileGetter + files []File resolveErr error } -func NewFileQueryResult(finder file.Finder) *FileQueryResult { +func NewFileQueryResult(fileGetter FileGetter) *FileQueryResult { return &FileQueryResult{ - finder: finder, + getter: fileGetter, } } -func (r *FileQueryResult) Resolve(ctx context.Context) ([]file.File, error) { +func (r *FileQueryResult) Resolve(ctx context.Context) ([]File, error) { // cache results if r.files == nil && r.resolveErr == nil { - r.files, r.resolveErr = r.finder.Find(ctx, r.IDs...) + r.files, r.resolveErr = r.getter.Find(ctx, r.IDs...) 
} return r.files, r.resolveErr } diff --git a/pkg/file/fingerprint.go b/pkg/models/fingerprint.go similarity index 90% rename from pkg/file/fingerprint.go rename to pkg/models/fingerprint.go index 3155276c594..0123f289d9f 100644 --- a/pkg/file/fingerprint.go +++ b/pkg/models/fingerprint.go @@ -1,4 +1,9 @@ -package file +package models + +import ( + "fmt" + "strconv" +) var ( FingerprintTypeOshash = "oshash" @@ -12,6 +17,15 @@ type Fingerprint struct { Fingerprint interface{} } +func (f *Fingerprint) Value() string { + switch v := f.Fingerprint.(type) { + case int64: + return strconv.FormatUint(uint64(v), 16) + default: + return fmt.Sprintf("%v", f.Fingerprint) + } +} + type Fingerprints []Fingerprint func (f *Fingerprints) Remove(type_ string) { @@ -114,8 +128,3 @@ func (f Fingerprints) AppendUnique(o Fingerprint) Fingerprints { return append(f, o) } - -// FingerprintCalculator calculates a fingerprint for the provided file. -type FingerprintCalculator interface { - CalculateFingerprints(f *BaseFile, o Opener, useExisting bool) ([]Fingerprint, error) -} diff --git a/pkg/file/fingerprint_test.go b/pkg/models/fingerprint_test.go similarity index 99% rename from pkg/file/fingerprint_test.go rename to pkg/models/fingerprint_test.go index f13ce22545a..d35f4b08231 100644 --- a/pkg/file/fingerprint_test.go +++ b/pkg/models/fingerprint_test.go @@ -1,4 +1,4 @@ -package file +package models import "testing" diff --git a/pkg/models/fs.go b/pkg/models/fs.go new file mode 100644 index 00000000000..bdbf603498d --- /dev/null +++ b/pkg/models/fs.go @@ -0,0 +1,27 @@ +package models + +import ( + "io" + "io/fs" +) + +// FileOpener provides an interface to open a file. +type FileOpener interface { + Open() (io.ReadCloser, error) +} + +// FS represents a file system. 
+type FS interface { + Stat(name string) (fs.FileInfo, error) + Lstat(name string) (fs.FileInfo, error) + Open(name string) (fs.ReadDirFile, error) + OpenZip(name string) (ZipFS, error) + IsPathCaseSensitive(path string) (bool, error) +} + +// ZipFS represents a zip file system. +type ZipFS interface { + FS + io.Closer + OpenOnly(name string) (io.ReadCloser, error) +} diff --git a/pkg/models/gallery.go b/pkg/models/gallery.go index 61ee2a72de3..d3644d3fd6c 100644 --- a/pkg/models/gallery.go +++ b/pkg/models/gallery.go @@ -1,11 +1,5 @@ package models -import ( - "context" - - "github.com/stashapp/stash/pkg/file" -) - type GalleryFilterType struct { And *GalleryFilterType `json:"AND"` Or *GalleryFilterType `json:"OR"` @@ -86,40 +80,3 @@ type GalleryDestroyInput struct { DeleteFile *bool `json:"delete_file"` DeleteGenerated *bool `json:"delete_generated"` } - -type GalleryFinder interface { - FindMany(ctx context.Context, ids []int) ([]*Gallery, error) -} - -type GalleryReader interface { - Find(ctx context.Context, id int) (*Gallery, error) - GalleryFinder - FindByChecksum(ctx context.Context, checksum string) ([]*Gallery, error) - FindByChecksums(ctx context.Context, checksums []string) ([]*Gallery, error) - FindByPath(ctx context.Context, path string) ([]*Gallery, error) - FindBySceneID(ctx context.Context, sceneID int) ([]*Gallery, error) - FindByImageID(ctx context.Context, imageID int) ([]*Gallery, error) - - SceneIDLoader - PerformerIDLoader - TagIDLoader - - Count(ctx context.Context) (int, error) - All(ctx context.Context) ([]*Gallery, error) - Query(ctx context.Context, galleryFilter *GalleryFilterType, findFilter *FindFilterType) ([]*Gallery, int, error) - QueryCount(ctx context.Context, galleryFilter *GalleryFilterType, findFilter *FindFilterType) (int, error) - GetImageIDs(ctx context.Context, galleryID int) ([]int, error) -} - -type GalleryWriter interface { - Create(ctx context.Context, newGallery *Gallery, fileIDs []file.ID) error - Update(ctx 
context.Context, updatedGallery *Gallery) error - UpdatePartial(ctx context.Context, id int, updatedGallery GalleryPartial) (*Gallery, error) - Destroy(ctx context.Context, id int) error - UpdateImages(ctx context.Context, galleryID int, imageIDs []int) error -} - -type GalleryReaderWriter interface { - GalleryReader - GalleryWriter -} diff --git a/pkg/models/gallery_chapter.go b/pkg/models/gallery_chapter.go deleted file mode 100644 index 0057809821b..00000000000 --- a/pkg/models/gallery_chapter.go +++ /dev/null @@ -1,21 +0,0 @@ -package models - -import "context" - -type GalleryChapterReader interface { - Find(ctx context.Context, id int) (*GalleryChapter, error) - FindMany(ctx context.Context, ids []int) ([]*GalleryChapter, error) - FindByGalleryID(ctx context.Context, galleryID int) ([]*GalleryChapter, error) -} - -type GalleryChapterWriter interface { - Create(ctx context.Context, newGalleryChapter *GalleryChapter) error - Update(ctx context.Context, updatedGalleryChapter *GalleryChapter) error - UpdatePartial(ctx context.Context, id int, updatedGalleryChapter GalleryChapterPartial) (*GalleryChapter, error) - Destroy(ctx context.Context, id int) error -} - -type GalleryChapterReaderWriter interface { - GalleryChapterReader - GalleryChapterWriter -} diff --git a/pkg/models/image.go b/pkg/models/image.go index 288f6997621..ef90dfd7d69 100644 --- a/pkg/models/image.go +++ b/pkg/models/image.go @@ -77,60 +77,21 @@ type ImageQueryResult struct { Megapixels float64 TotalSize float64 - finder ImageFinder + getter ImageGetter images []*Image resolveErr error } -func NewImageQueryResult(finder ImageFinder) *ImageQueryResult { +func NewImageQueryResult(getter ImageGetter) *ImageQueryResult { return &ImageQueryResult{ - finder: finder, + getter: getter, } } func (r *ImageQueryResult) Resolve(ctx context.Context) ([]*Image, error) { // cache results if r.images == nil && r.resolveErr == nil { - r.images, r.resolveErr = r.finder.FindMany(ctx, r.IDs) + r.images, 
r.resolveErr = r.getter.FindMany(ctx, r.IDs) } return r.images, r.resolveErr } - -type ImageFinder interface { - // TODO - rename to Find and remove existing method - FindMany(ctx context.Context, ids []int) ([]*Image, error) -} - -type ImageReader interface { - ImageFinder - // TODO - remove this in another PR - Find(ctx context.Context, id int) (*Image, error) - FindByChecksum(ctx context.Context, checksum string) ([]*Image, error) - FindByGalleryID(ctx context.Context, galleryID int) ([]*Image, error) - CountByGalleryID(ctx context.Context, galleryID int) (int, error) - OCountByPerformerID(ctx context.Context, performerID int) (int, error) - Count(ctx context.Context) (int, error) - Size(ctx context.Context) (float64, error) - All(ctx context.Context) ([]*Image, error) - Query(ctx context.Context, options ImageQueryOptions) (*ImageQueryResult, error) - QueryCount(ctx context.Context, imageFilter *ImageFilterType, findFilter *FindFilterType) (int, error) - - GalleryIDLoader - PerformerIDLoader - TagIDLoader -} - -type ImageWriter interface { - Create(ctx context.Context, newImage *ImageCreateInput) error - Update(ctx context.Context, updatedImage *Image) error - UpdatePartial(ctx context.Context, id int, partial ImagePartial) (*Image, error) - IncrementOCounter(ctx context.Context, id int) (int, error) - DecrementOCounter(ctx context.Context, id int) (int, error) - ResetOCounter(ctx context.Context, id int) (int, error) - Destroy(ctx context.Context, id int) error -} - -type ImageReaderWriter interface { - ImageReader - ImageWriter -} diff --git a/pkg/models/jsonschema/image.go b/pkg/models/jsonschema/image.go index 1862ffc8290..7ff0b21621f 100644 --- a/pkg/models/jsonschema/image.go +++ b/pkg/models/jsonschema/image.go @@ -10,10 +10,14 @@ import ( ) type Image struct { - Title string `json:"title,omitempty"` - Studio string `json:"studio,omitempty"` - Rating int `json:"rating,omitempty"` - URL string `json:"url,omitempty"` + Title string `json:"title,omitempty"` 
+ Studio string `json:"studio,omitempty"` + Rating int `json:"rating,omitempty"` + + // deprecated - for import only + URL string `json:"url,omitempty"` + + URLs []string `json:"urls,omitempty"` Date string `json:"date,omitempty"` Organized bool `json:"organized,omitempty"` OCounter int `json:"o_counter,omitempty"` diff --git a/pkg/models/jsonschema/scene.go b/pkg/models/jsonschema/scene.go index 7ebae7a1785..8a081f3b610 100644 --- a/pkg/models/jsonschema/scene.go +++ b/pkg/models/jsonschema/scene.go @@ -42,8 +42,10 @@ type Scene struct { Title string `json:"title,omitempty"` Code string `json:"code,omitempty"` Studio string `json:"studio,omitempty"` + // deprecated - for import only - URL string `json:"url,omitempty"` + URL string `json:"url,omitempty"` + URLs []string `json:"urls,omitempty"` Date string `json:"date,omitempty"` Rating int `json:"rating,omitempty"` diff --git a/pkg/models/mocks/FileReaderWriter.go b/pkg/models/mocks/FileReaderWriter.go new file mode 100644 index 00000000000..8e7982b4745 --- /dev/null +++ b/pkg/models/mocks/FileReaderWriter.go @@ -0,0 +1,350 @@ +// Code generated by mockery v2.10.0. DO NOT EDIT. 
+ +package mocks + +import ( + context "context" + fs "io/fs" + + mock "github.com/stretchr/testify/mock" + + models "github.com/stashapp/stash/pkg/models" +) + +// FileReaderWriter is an autogenerated mock type for the FileReaderWriter type +type FileReaderWriter struct { + mock.Mock +} + +// CountAllInPaths provides a mock function with given fields: ctx, p +func (_m *FileReaderWriter) CountAllInPaths(ctx context.Context, p []string) (int, error) { + ret := _m.Called(ctx, p) + + var r0 int + if rf, ok := ret.Get(0).(func(context.Context, []string) int); ok { + r0 = rf(ctx, p) + } else { + r0 = ret.Get(0).(int) + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []string) error); ok { + r1 = rf(ctx, p) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// CountByFolderID provides a mock function with given fields: ctx, folderID +func (_m *FileReaderWriter) CountByFolderID(ctx context.Context, folderID models.FolderID) (int, error) { + ret := _m.Called(ctx, folderID) + + var r0 int + if rf, ok := ret.Get(0).(func(context.Context, models.FolderID) int); ok { + r0 = rf(ctx, folderID) + } else { + r0 = ret.Get(0).(int) + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FolderID) error); ok { + r1 = rf(ctx, folderID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Create provides a mock function with given fields: ctx, f +func (_m *FileReaderWriter) Create(ctx context.Context, f models.File) error { + ret := _m.Called(ctx, f) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, models.File) error); ok { + r0 = rf(ctx, f) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Destroy provides a mock function with given fields: ctx, id +func (_m *FileReaderWriter) Destroy(ctx context.Context, id models.FileID) error { + ret := _m.Called(ctx, id) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, models.FileID) error); ok { + r0 = rf(ctx, id) + } else { + r0 = ret.Error(0) + } 
+ + return r0 +} + +// Find provides a mock function with given fields: ctx, id +func (_m *FileReaderWriter) Find(ctx context.Context, id ...models.FileID) ([]models.File, error) { + _va := make([]interface{}, len(id)) + for _i := range id { + _va[_i] = id[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 []models.File + if rf, ok := ret.Get(0).(func(context.Context, ...models.FileID) []models.File); ok { + r0 = rf(ctx, id...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.File) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, ...models.FileID) error); ok { + r1 = rf(ctx, id...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindAllByPath provides a mock function with given fields: ctx, path +func (_m *FileReaderWriter) FindAllByPath(ctx context.Context, path string) ([]models.File, error) { + ret := _m.Called(ctx, path) + + var r0 []models.File + if rf, ok := ret.Get(0).(func(context.Context, string) []models.File); ok { + r0 = rf(ctx, path) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.File) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { + r1 = rf(ctx, path) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindAllInPaths provides a mock function with given fields: ctx, p, limit, offset +func (_m *FileReaderWriter) FindAllInPaths(ctx context.Context, p []string, limit int, offset int) ([]models.File, error) { + ret := _m.Called(ctx, p, limit, offset) + + var r0 []models.File + if rf, ok := ret.Get(0).(func(context.Context, []string, int, int) []models.File); ok { + r0 = rf(ctx, p, limit, offset) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.File) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []string, int, int) error); ok { + r1 = rf(ctx, p, limit, offset) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + 
+// FindByFileInfo provides a mock function with given fields: ctx, info, size +func (_m *FileReaderWriter) FindByFileInfo(ctx context.Context, info fs.FileInfo, size int64) ([]models.File, error) { + ret := _m.Called(ctx, info, size) + + var r0 []models.File + if rf, ok := ret.Get(0).(func(context.Context, fs.FileInfo, int64) []models.File); ok { + r0 = rf(ctx, info, size) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.File) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, fs.FileInfo, int64) error); ok { + r1 = rf(ctx, info, size) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByFingerprint provides a mock function with given fields: ctx, fp +func (_m *FileReaderWriter) FindByFingerprint(ctx context.Context, fp models.Fingerprint) ([]models.File, error) { + ret := _m.Called(ctx, fp) + + var r0 []models.File + if rf, ok := ret.Get(0).(func(context.Context, models.Fingerprint) []models.File); ok { + r0 = rf(ctx, fp) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.File) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.Fingerprint) error); ok { + r1 = rf(ctx, fp) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByPath provides a mock function with given fields: ctx, path +func (_m *FileReaderWriter) FindByPath(ctx context.Context, path string) (models.File, error) { + ret := _m.Called(ctx, path) + + var r0 models.File + if rf, ok := ret.Get(0).(func(context.Context, string) models.File); ok { + r0 = rf(ctx, path) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(models.File) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { + r1 = rf(ctx, path) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByZipFileID provides a mock function with given fields: ctx, zipFileID +func (_m *FileReaderWriter) FindByZipFileID(ctx context.Context, zipFileID models.FileID) ([]models.File, error) { + 
ret := _m.Called(ctx, zipFileID) + + var r0 []models.File + if rf, ok := ret.Get(0).(func(context.Context, models.FileID) []models.File); ok { + r0 = rf(ctx, zipFileID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.File) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FileID) error); ok { + r1 = rf(ctx, zipFileID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetCaptions provides a mock function with given fields: ctx, fileID +func (_m *FileReaderWriter) GetCaptions(ctx context.Context, fileID models.FileID) ([]*models.VideoCaption, error) { + ret := _m.Called(ctx, fileID) + + var r0 []*models.VideoCaption + if rf, ok := ret.Get(0).(func(context.Context, models.FileID) []*models.VideoCaption); ok { + r0 = rf(ctx, fileID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.VideoCaption) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FileID) error); ok { + r1 = rf(ctx, fileID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// IsPrimary provides a mock function with given fields: ctx, fileID +func (_m *FileReaderWriter) IsPrimary(ctx context.Context, fileID models.FileID) (bool, error) { + ret := _m.Called(ctx, fileID) + + var r0 bool + if rf, ok := ret.Get(0).(func(context.Context, models.FileID) bool); ok { + r0 = rf(ctx, fileID) + } else { + r0 = ret.Get(0).(bool) + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FileID) error); ok { + r1 = rf(ctx, fileID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Query provides a mock function with given fields: ctx, options +func (_m *FileReaderWriter) Query(ctx context.Context, options models.FileQueryOptions) (*models.FileQueryResult, error) { + ret := _m.Called(ctx, options) + + var r0 *models.FileQueryResult + if rf, ok := ret.Get(0).(func(context.Context, models.FileQueryOptions) *models.FileQueryResult); ok { + r0 = rf(ctx, options) + } else { + if 
ret.Get(0) != nil { + r0 = ret.Get(0).(*models.FileQueryResult) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FileQueryOptions) error); ok { + r1 = rf(ctx, options) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Update provides a mock function with given fields: ctx, f +func (_m *FileReaderWriter) Update(ctx context.Context, f models.File) error { + ret := _m.Called(ctx, f) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, models.File) error); ok { + r0 = rf(ctx, f) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// UpdateCaptions provides a mock function with given fields: ctx, fileID, captions +func (_m *FileReaderWriter) UpdateCaptions(ctx context.Context, fileID models.FileID, captions []*models.VideoCaption) error { + ret := _m.Called(ctx, fileID, captions) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, models.FileID, []*models.VideoCaption) error); ok { + r0 = rf(ctx, fileID, captions) + } else { + r0 = ret.Error(0) + } + + return r0 +} diff --git a/pkg/models/mocks/FolderReaderWriter.go b/pkg/models/mocks/FolderReaderWriter.go new file mode 100644 index 00000000000..968bed4adc7 --- /dev/null +++ b/pkg/models/mocks/FolderReaderWriter.go @@ -0,0 +1,193 @@ +// Code generated by mockery v2.10.0. DO NOT EDIT. 
+ +package mocks + +import ( + context "context" + + models "github.com/stashapp/stash/pkg/models" + mock "github.com/stretchr/testify/mock" +) + +// FolderReaderWriter is an autogenerated mock type for the FolderReaderWriter type +type FolderReaderWriter struct { + mock.Mock +} + +// CountAllInPaths provides a mock function with given fields: ctx, p +func (_m *FolderReaderWriter) CountAllInPaths(ctx context.Context, p []string) (int, error) { + ret := _m.Called(ctx, p) + + var r0 int + if rf, ok := ret.Get(0).(func(context.Context, []string) int); ok { + r0 = rf(ctx, p) + } else { + r0 = ret.Get(0).(int) + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []string) error); ok { + r1 = rf(ctx, p) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Create provides a mock function with given fields: ctx, f +func (_m *FolderReaderWriter) Create(ctx context.Context, f *models.Folder) error { + ret := _m.Called(ctx, f) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *models.Folder) error); ok { + r0 = rf(ctx, f) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Destroy provides a mock function with given fields: ctx, id +func (_m *FolderReaderWriter) Destroy(ctx context.Context, id models.FolderID) error { + ret := _m.Called(ctx, id) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, models.FolderID) error); ok { + r0 = rf(ctx, id) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Find provides a mock function with given fields: ctx, id +func (_m *FolderReaderWriter) Find(ctx context.Context, id models.FolderID) (*models.Folder, error) { + ret := _m.Called(ctx, id) + + var r0 *models.Folder + if rf, ok := ret.Get(0).(func(context.Context, models.FolderID) *models.Folder); ok { + r0 = rf(ctx, id) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.Folder) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FolderID) error); ok { + r1 = rf(ctx, id) + } else 
{ + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindAllInPaths provides a mock function with given fields: ctx, p, limit, offset +func (_m *FolderReaderWriter) FindAllInPaths(ctx context.Context, p []string, limit int, offset int) ([]*models.Folder, error) { + ret := _m.Called(ctx, p, limit, offset) + + var r0 []*models.Folder + if rf, ok := ret.Get(0).(func(context.Context, []string, int, int) []*models.Folder); ok { + r0 = rf(ctx, p, limit, offset) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Folder) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []string, int, int) error); ok { + r1 = rf(ctx, p, limit, offset) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByParentFolderID provides a mock function with given fields: ctx, parentFolderID +func (_m *FolderReaderWriter) FindByParentFolderID(ctx context.Context, parentFolderID models.FolderID) ([]*models.Folder, error) { + ret := _m.Called(ctx, parentFolderID) + + var r0 []*models.Folder + if rf, ok := ret.Get(0).(func(context.Context, models.FolderID) []*models.Folder); ok { + r0 = rf(ctx, parentFolderID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Folder) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FolderID) error); ok { + r1 = rf(ctx, parentFolderID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByPath provides a mock function with given fields: ctx, path +func (_m *FolderReaderWriter) FindByPath(ctx context.Context, path string) (*models.Folder, error) { + ret := _m.Called(ctx, path) + + var r0 *models.Folder + if rf, ok := ret.Get(0).(func(context.Context, string) *models.Folder); ok { + r0 = rf(ctx, path) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.Folder) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { + r1 = rf(ctx, path) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByZipFileID 
provides a mock function with given fields: ctx, zipFileID +func (_m *FolderReaderWriter) FindByZipFileID(ctx context.Context, zipFileID models.FileID) ([]*models.Folder, error) { + ret := _m.Called(ctx, zipFileID) + + var r0 []*models.Folder + if rf, ok := ret.Get(0).(func(context.Context, models.FileID) []*models.Folder); ok { + r0 = rf(ctx, zipFileID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Folder) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FileID) error); ok { + r1 = rf(ctx, zipFileID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Update provides a mock function with given fields: ctx, f +func (_m *FolderReaderWriter) Update(ctx context.Context, f *models.Folder) error { + ret := _m.Called(ctx, f) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *models.Folder) error); ok { + r0 = rf(ctx, f) + } else { + r0 = ret.Error(0) + } + + return r0 +} diff --git a/pkg/models/mocks/GalleryReaderWriter.go b/pkg/models/mocks/GalleryReaderWriter.go index 1c0ddf95775..2b901466b19 100644 --- a/pkg/models/mocks/GalleryReaderWriter.go +++ b/pkg/models/mocks/GalleryReaderWriter.go @@ -5,10 +5,8 @@ package mocks import ( context "context" - file "github.com/stashapp/stash/pkg/file" - mock "github.com/stretchr/testify/mock" - models "github.com/stashapp/stash/pkg/models" + mock "github.com/stretchr/testify/mock" ) // GalleryReaderWriter is an autogenerated mock type for the GalleryReaderWriter type @@ -16,6 +14,41 @@ type GalleryReaderWriter struct { mock.Mock } +// AddFileID provides a mock function with given fields: ctx, id, fileID +func (_m *GalleryReaderWriter) AddFileID(ctx context.Context, id int, fileID models.FileID) error { + ret := _m.Called(ctx, id, fileID) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int, models.FileID) error); ok { + r0 = rf(ctx, id, fileID) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// AddImages provides a mock function 
with given fields: ctx, galleryID, imageIDs +func (_m *GalleryReaderWriter) AddImages(ctx context.Context, galleryID int, imageIDs ...int) error { + _va := make([]interface{}, len(imageIDs)) + for _i := range imageIDs { + _va[_i] = imageIDs[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, galleryID) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int, ...int) error); ok { + r0 = rf(ctx, galleryID, imageIDs...) + } else { + r0 = ret.Error(0) + } + + return r0 +} + // All provides a mock function with given fields: ctx func (_m *GalleryReaderWriter) All(ctx context.Context) ([]*models.Gallery, error) { ret := _m.Called(ctx) @@ -60,12 +93,33 @@ func (_m *GalleryReaderWriter) Count(ctx context.Context) (int, error) { return r0, r1 } +// CountByFileID provides a mock function with given fields: ctx, fileID +func (_m *GalleryReaderWriter) CountByFileID(ctx context.Context, fileID models.FileID) (int, error) { + ret := _m.Called(ctx, fileID) + + var r0 int + if rf, ok := ret.Get(0).(func(context.Context, models.FileID) int); ok { + r0 = rf(ctx, fileID) + } else { + r0 = ret.Get(0).(int) + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FileID) error); ok { + r1 = rf(ctx, fileID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // Create provides a mock function with given fields: ctx, newGallery, fileIDs -func (_m *GalleryReaderWriter) Create(ctx context.Context, newGallery *models.Gallery, fileIDs []file.ID) error { +func (_m *GalleryReaderWriter) Create(ctx context.Context, newGallery *models.Gallery, fileIDs []models.FileID) error { ret := _m.Called(ctx, newGallery, fileIDs) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *models.Gallery, []file.ID) error); ok { + if rf, ok := ret.Get(0).(func(context.Context, *models.Gallery, []models.FileID) error); ok { r0 = rf(ctx, newGallery, fileIDs) } else { r0 = ret.Error(0) @@ -157,6 +211,75 @@ 
func (_m *GalleryReaderWriter) FindByChecksums(ctx context.Context, checksums [] return r0, r1 } +// FindByFileID provides a mock function with given fields: ctx, fileID +func (_m *GalleryReaderWriter) FindByFileID(ctx context.Context, fileID models.FileID) ([]*models.Gallery, error) { + ret := _m.Called(ctx, fileID) + + var r0 []*models.Gallery + if rf, ok := ret.Get(0).(func(context.Context, models.FileID) []*models.Gallery); ok { + r0 = rf(ctx, fileID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Gallery) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FileID) error); ok { + r1 = rf(ctx, fileID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByFingerprints provides a mock function with given fields: ctx, fp +func (_m *GalleryReaderWriter) FindByFingerprints(ctx context.Context, fp []models.Fingerprint) ([]*models.Gallery, error) { + ret := _m.Called(ctx, fp) + + var r0 []*models.Gallery + if rf, ok := ret.Get(0).(func(context.Context, []models.Fingerprint) []*models.Gallery); ok { + r0 = rf(ctx, fp) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Gallery) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []models.Fingerprint) error); ok { + r1 = rf(ctx, fp) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByFolderID provides a mock function with given fields: ctx, folderID +func (_m *GalleryReaderWriter) FindByFolderID(ctx context.Context, folderID models.FolderID) ([]*models.Gallery, error) { + ret := _m.Called(ctx, folderID) + + var r0 []*models.Gallery + if rf, ok := ret.Get(0).(func(context.Context, models.FolderID) []*models.Gallery); ok { + r0 = rf(ctx, folderID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Gallery) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FolderID) error); ok { + r1 = rf(ctx, folderID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // 
FindByImageID provides a mock function with given fields: ctx, imageID func (_m *GalleryReaderWriter) FindByImageID(ctx context.Context, imageID int) ([]*models.Gallery, error) { ret := _m.Called(ctx, imageID) @@ -249,13 +372,59 @@ func (_m *GalleryReaderWriter) FindMany(ctx context.Context, ids []int) ([]*mode return r0, r1 } -// GetImageIDs provides a mock function with given fields: ctx, galleryID -func (_m *GalleryReaderWriter) GetImageIDs(ctx context.Context, galleryID int) ([]int, error) { - ret := _m.Called(ctx, galleryID) +// FindUserGalleryByTitle provides a mock function with given fields: ctx, title +func (_m *GalleryReaderWriter) FindUserGalleryByTitle(ctx context.Context, title string) ([]*models.Gallery, error) { + ret := _m.Called(ctx, title) + + var r0 []*models.Gallery + if rf, ok := ret.Get(0).(func(context.Context, string) []*models.Gallery); ok { + r0 = rf(ctx, title) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Gallery) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { + r1 = rf(ctx, title) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetFiles provides a mock function with given fields: ctx, relatedID +func (_m *GalleryReaderWriter) GetFiles(ctx context.Context, relatedID int) ([]models.File, error) { + ret := _m.Called(ctx, relatedID) + + var r0 []models.File + if rf, ok := ret.Get(0).(func(context.Context, int) []models.File); ok { + r0 = rf(ctx, relatedID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.File) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, relatedID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetImageIDs provides a mock function with given fields: ctx, relatedID +func (_m *GalleryReaderWriter) GetImageIDs(ctx context.Context, relatedID int) ([]int, error) { + ret := _m.Called(ctx, relatedID) var r0 []int if rf, ok := ret.Get(0).(func(context.Context, 
int) []int); ok { - r0 = rf(ctx, galleryID) + r0 = rf(ctx, relatedID) } else { if ret.Get(0) != nil { r0 = ret.Get(0).([]int) @@ -264,7 +433,30 @@ func (_m *GalleryReaderWriter) GetImageIDs(ctx context.Context, galleryID int) ( var r1 error if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { - r1 = rf(ctx, galleryID) + r1 = rf(ctx, relatedID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetManyFileIDs provides a mock function with given fields: ctx, ids +func (_m *GalleryReaderWriter) GetManyFileIDs(ctx context.Context, ids []int) ([][]models.FileID, error) { + ret := _m.Called(ctx, ids) + + var r0 [][]models.FileID + if rf, ok := ret.Get(0).(func(context.Context, []int) [][]models.FileID); ok { + r0 = rf(ctx, ids) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([][]models.FileID) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []int) error); ok { + r1 = rf(ctx, ids) } else { r1 = ret.Error(1) } @@ -392,6 +584,27 @@ func (_m *GalleryReaderWriter) QueryCount(ctx context.Context, galleryFilter *mo return r0, r1 } +// RemoveImages provides a mock function with given fields: ctx, galleryID, imageIDs +func (_m *GalleryReaderWriter) RemoveImages(ctx context.Context, galleryID int, imageIDs ...int) error { + _va := make([]interface{}, len(imageIDs)) + for _i := range imageIDs { + _va[_i] = imageIDs[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, galleryID) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int, ...int) error); ok { + r0 = rf(ctx, galleryID, imageIDs...) 
+ } else { + r0 = ret.Error(0) + } + + return r0 +} + // Update provides a mock function with given fields: ctx, updatedGallery func (_m *GalleryReaderWriter) Update(ctx context.Context, updatedGallery *models.Gallery) error { ret := _m.Called(ctx, updatedGallery) diff --git a/pkg/models/mocks/ImageReaderWriter.go b/pkg/models/mocks/ImageReaderWriter.go index f745f8afe27..bd651108ab0 100644 --- a/pkg/models/mocks/ImageReaderWriter.go +++ b/pkg/models/mocks/ImageReaderWriter.go @@ -14,6 +14,20 @@ type ImageReaderWriter struct { mock.Mock } +// AddFileID provides a mock function with given fields: ctx, id, fileID +func (_m *ImageReaderWriter) AddFileID(ctx context.Context, id int, fileID models.FileID) error { + ret := _m.Called(ctx, id, fileID) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int, models.FileID) error); ok { + r0 = rf(ctx, id, fileID) + } else { + r0 = ret.Error(0) + } + + return r0 +} + // All provides a mock function with given fields: ctx func (_m *ImageReaderWriter) All(ctx context.Context) ([]*models.Image, error) { ret := _m.Called(ctx) @@ -58,6 +72,27 @@ func (_m *ImageReaderWriter) Count(ctx context.Context) (int, error) { return r0, r1 } +// CountByFileID provides a mock function with given fields: ctx, fileID +func (_m *ImageReaderWriter) CountByFileID(ctx context.Context, fileID models.FileID) (int, error) { + ret := _m.Called(ctx, fileID) + + var r0 int + if rf, ok := ret.Get(0).(func(context.Context, models.FileID) int); ok { + r0 = rf(ctx, fileID) + } else { + r0 = ret.Get(0).(int) + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FileID) error); ok { + r1 = rf(ctx, fileID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // CountByGalleryID provides a mock function with given fields: ctx, galleryID func (_m *ImageReaderWriter) CountByGalleryID(ctx context.Context, galleryID int) (int, error) { ret := _m.Called(ctx, galleryID) @@ -79,13 +114,13 @@ func (_m *ImageReaderWriter) 
CountByGalleryID(ctx context.Context, galleryID int return r0, r1 } -// Create provides a mock function with given fields: ctx, newImage -func (_m *ImageReaderWriter) Create(ctx context.Context, newImage *models.ImageCreateInput) error { - ret := _m.Called(ctx, newImage) +// Create provides a mock function with given fields: ctx, newImage, fileIDs +func (_m *ImageReaderWriter) Create(ctx context.Context, newImage *models.Image, fileIDs []models.FileID) error { + ret := _m.Called(ctx, newImage, fileIDs) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *models.ImageCreateInput) error); ok { - r0 = rf(ctx, newImage) + if rf, ok := ret.Get(0).(func(context.Context, *models.Image, []models.FileID) error); ok { + r0 = rf(ctx, newImage, fileIDs) } else { r0 = ret.Error(0) } @@ -174,6 +209,75 @@ func (_m *ImageReaderWriter) FindByChecksum(ctx context.Context, checksum string return r0, r1 } +// FindByFileID provides a mock function with given fields: ctx, fileID +func (_m *ImageReaderWriter) FindByFileID(ctx context.Context, fileID models.FileID) ([]*models.Image, error) { + ret := _m.Called(ctx, fileID) + + var r0 []*models.Image + if rf, ok := ret.Get(0).(func(context.Context, models.FileID) []*models.Image); ok { + r0 = rf(ctx, fileID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Image) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FileID) error); ok { + r1 = rf(ctx, fileID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByFingerprints provides a mock function with given fields: ctx, fp +func (_m *ImageReaderWriter) FindByFingerprints(ctx context.Context, fp []models.Fingerprint) ([]*models.Image, error) { + ret := _m.Called(ctx, fp) + + var r0 []*models.Image + if rf, ok := ret.Get(0).(func(context.Context, []models.Fingerprint) []*models.Image); ok { + r0 = rf(ctx, fp) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Image) + } + } + + var r1 error + if rf, ok := 
ret.Get(1).(func(context.Context, []models.Fingerprint) error); ok { + r1 = rf(ctx, fp) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByFolderID provides a mock function with given fields: ctx, fileID +func (_m *ImageReaderWriter) FindByFolderID(ctx context.Context, fileID models.FolderID) ([]*models.Image, error) { + ret := _m.Called(ctx, fileID) + + var r0 []*models.Image + if rf, ok := ret.Get(0).(func(context.Context, models.FolderID) []*models.Image); ok { + r0 = rf(ctx, fileID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Image) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FolderID) error); ok { + r1 = rf(ctx, fileID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // FindByGalleryID provides a mock function with given fields: ctx, galleryID func (_m *ImageReaderWriter) FindByGalleryID(ctx context.Context, galleryID int) ([]*models.Image, error) { ret := _m.Called(ctx, galleryID) @@ -197,6 +301,29 @@ func (_m *ImageReaderWriter) FindByGalleryID(ctx context.Context, galleryID int) return r0, r1 } +// FindByZipFileID provides a mock function with given fields: ctx, zipFileID +func (_m *ImageReaderWriter) FindByZipFileID(ctx context.Context, zipFileID models.FileID) ([]*models.Image, error) { + ret := _m.Called(ctx, zipFileID) + + var r0 []*models.Image + if rf, ok := ret.Get(0).(func(context.Context, models.FileID) []*models.Image); ok { + r0 = rf(ctx, zipFileID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Image) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FileID) error); ok { + r1 = rf(ctx, zipFileID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // FindMany provides a mock function with given fields: ctx, ids func (_m *ImageReaderWriter) FindMany(ctx context.Context, ids []int) ([]*models.Image, error) { ret := _m.Called(ctx, ids) @@ -220,6 +347,29 @@ func (_m *ImageReaderWriter) FindMany(ctx 
context.Context, ids []int) ([]*models return r0, r1 } +// GetFiles provides a mock function with given fields: ctx, relatedID +func (_m *ImageReaderWriter) GetFiles(ctx context.Context, relatedID int) ([]models.File, error) { + ret := _m.Called(ctx, relatedID) + + var r0 []models.File + if rf, ok := ret.Get(0).(func(context.Context, int) []models.File); ok { + r0 = rf(ctx, relatedID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.File) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, relatedID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // GetGalleryIDs provides a mock function with given fields: ctx, relatedID func (_m *ImageReaderWriter) GetGalleryIDs(ctx context.Context, relatedID int) ([]int, error) { ret := _m.Called(ctx, relatedID) @@ -243,6 +393,29 @@ func (_m *ImageReaderWriter) GetGalleryIDs(ctx context.Context, relatedID int) ( return r0, r1 } +// GetManyFileIDs provides a mock function with given fields: ctx, ids +func (_m *ImageReaderWriter) GetManyFileIDs(ctx context.Context, ids []int) ([][]models.FileID, error) { + ret := _m.Called(ctx, ids) + + var r0 [][]models.FileID + if rf, ok := ret.Get(0).(func(context.Context, []int) [][]models.FileID); ok { + r0 = rf(ctx, ids) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([][]models.FileID) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []int) error); ok { + r1 = rf(ctx, ids) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // GetPerformerIDs provides a mock function with given fields: ctx, relatedID func (_m *ImageReaderWriter) GetPerformerIDs(ctx context.Context, relatedID int) ([]int, error) { ret := _m.Called(ctx, relatedID) @@ -289,6 +462,29 @@ func (_m *ImageReaderWriter) GetTagIDs(ctx context.Context, relatedID int) ([]in return r0, r1 } +// GetURLs provides a mock function with given fields: ctx, relatedID +func (_m *ImageReaderWriter) GetURLs(ctx context.Context, 
relatedID int) ([]string, error) { + ret := _m.Called(ctx, relatedID) + + var r0 []string + if rf, ok := ret.Get(0).(func(context.Context, int) []string); ok { + r0 = rf(ctx, relatedID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]string) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, relatedID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // IncrementOCounter provides a mock function with given fields: ctx, id func (_m *ImageReaderWriter) IncrementOCounter(ctx context.Context, id int) (int, error) { ret := _m.Called(ctx, id) @@ -453,3 +649,31 @@ func (_m *ImageReaderWriter) UpdatePartial(ctx context.Context, id int, partial return r0, r1 } + +// UpdatePerformers provides a mock function with given fields: ctx, imageID, performerIDs +func (_m *ImageReaderWriter) UpdatePerformers(ctx context.Context, imageID int, performerIDs []int) error { + ret := _m.Called(ctx, imageID, performerIDs) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int, []int) error); ok { + r0 = rf(ctx, imageID, performerIDs) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// UpdateTags provides a mock function with given fields: ctx, imageID, tagIDs +func (_m *ImageReaderWriter) UpdateTags(ctx context.Context, imageID int, tagIDs []int) error { + ret := _m.Called(ctx, imageID, tagIDs) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int, []int) error); ok { + r0 = rf(ctx, imageID, tagIDs) + } else { + r0 = ret.Error(0) + } + + return r0 +} diff --git a/pkg/models/mocks/SceneMarkerReaderWriter.go b/pkg/models/mocks/SceneMarkerReaderWriter.go index 2be3b118437..4b4fa6d2ce9 100644 --- a/pkg/models/mocks/SceneMarkerReaderWriter.go +++ b/pkg/models/mocks/SceneMarkerReaderWriter.go @@ -199,13 +199,13 @@ func (_m *SceneMarkerReaderWriter) GetMarkerStrings(ctx context.Context, q *stri return r0, r1 } -// GetTagIDs provides a mock function with given fields: ctx, imageID -func 
(_m *SceneMarkerReaderWriter) GetTagIDs(ctx context.Context, imageID int) ([]int, error) { - ret := _m.Called(ctx, imageID) +// GetTagIDs provides a mock function with given fields: ctx, relatedID +func (_m *SceneMarkerReaderWriter) GetTagIDs(ctx context.Context, relatedID int) ([]int, error) { + ret := _m.Called(ctx, relatedID) var r0 []int if rf, ok := ret.Get(0).(func(context.Context, int) []int); ok { - r0 = rf(ctx, imageID) + r0 = rf(ctx, relatedID) } else { if ret.Get(0) != nil { r0 = ret.Get(0).([]int) @@ -214,7 +214,7 @@ func (_m *SceneMarkerReaderWriter) GetTagIDs(ctx context.Context, imageID int) ( var r1 error if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { - r1 = rf(ctx, imageID) + r1 = rf(ctx, relatedID) } else { r1 = ret.Error(1) } diff --git a/pkg/models/mocks/SceneReaderWriter.go b/pkg/models/mocks/SceneReaderWriter.go index 8d7245ee9ea..93374587198 100644 --- a/pkg/models/mocks/SceneReaderWriter.go +++ b/pkg/models/mocks/SceneReaderWriter.go @@ -5,10 +5,8 @@ package mocks import ( context "context" - file "github.com/stashapp/stash/pkg/file" - mock "github.com/stretchr/testify/mock" - models "github.com/stashapp/stash/pkg/models" + mock "github.com/stretchr/testify/mock" ) // SceneReaderWriter is an autogenerated mock type for the SceneReaderWriter type @@ -16,6 +14,34 @@ type SceneReaderWriter struct { mock.Mock } +// AddFileID provides a mock function with given fields: ctx, id, fileID +func (_m *SceneReaderWriter) AddFileID(ctx context.Context, id int, fileID models.FileID) error { + ret := _m.Called(ctx, id, fileID) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int, models.FileID) error); ok { + r0 = rf(ctx, id, fileID) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// AddGalleryIDs provides a mock function with given fields: ctx, sceneID, galleryIDs +func (_m *SceneReaderWriter) AddGalleryIDs(ctx context.Context, sceneID int, galleryIDs []int) error { + ret := _m.Called(ctx, sceneID, galleryIDs) + + 
var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int, []int) error); ok { + r0 = rf(ctx, sceneID, galleryIDs) + } else { + r0 = ret.Error(0) + } + + return r0 +} + // All provides a mock function with given fields: ctx func (_m *SceneReaderWriter) All(ctx context.Context) ([]*models.Scene, error) { ret := _m.Called(ctx) @@ -39,6 +65,20 @@ func (_m *SceneReaderWriter) All(ctx context.Context) ([]*models.Scene, error) { return r0, r1 } +// AssignFiles provides a mock function with given fields: ctx, sceneID, fileID +func (_m *SceneReaderWriter) AssignFiles(ctx context.Context, sceneID int, fileID []models.FileID) error { + ret := _m.Called(ctx, sceneID, fileID) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int, []models.FileID) error); ok { + r0 = rf(ctx, sceneID, fileID) + } else { + r0 = ret.Error(0) + } + + return r0 +} + // Count provides a mock function with given fields: ctx func (_m *SceneReaderWriter) Count(ctx context.Context) (int, error) { ret := _m.Called(ctx) @@ -60,6 +100,27 @@ func (_m *SceneReaderWriter) Count(ctx context.Context) (int, error) { return r0, r1 } +// CountByFileID provides a mock function with given fields: ctx, fileID +func (_m *SceneReaderWriter) CountByFileID(ctx context.Context, fileID models.FileID) (int, error) { + ret := _m.Called(ctx, fileID) + + var r0 int + if rf, ok := ret.Get(0).(func(context.Context, models.FileID) int); ok { + r0 = rf(ctx, fileID) + } else { + r0 = ret.Get(0).(int) + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FileID) error); ok { + r1 = rf(ctx, fileID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // CountByMovieID provides a mock function with given fields: ctx, movieID func (_m *SceneReaderWriter) CountByMovieID(ctx context.Context, movieID int) (int, error) { ret := _m.Called(ctx, movieID) @@ -187,11 +248,11 @@ func (_m *SceneReaderWriter) CountMissingOSHash(ctx context.Context) (int, error } // Create provides a mock function 
with given fields: ctx, newScene, fileIDs -func (_m *SceneReaderWriter) Create(ctx context.Context, newScene *models.Scene, fileIDs []file.ID) error { +func (_m *SceneReaderWriter) Create(ctx context.Context, newScene *models.Scene, fileIDs []models.FileID) error { ret := _m.Called(ctx, newScene, fileIDs) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *models.Scene, []file.ID) error); ok { + if rf, ok := ret.Get(0).(func(context.Context, *models.Scene, []models.FileID) error); ok { r0 = rf(ctx, newScene, fileIDs) } else { r0 = ret.Error(0) @@ -302,6 +363,52 @@ func (_m *SceneReaderWriter) FindByChecksum(ctx context.Context, checksum string return r0, r1 } +// FindByFileID provides a mock function with given fields: ctx, fileID +func (_m *SceneReaderWriter) FindByFileID(ctx context.Context, fileID models.FileID) ([]*models.Scene, error) { + ret := _m.Called(ctx, fileID) + + var r0 []*models.Scene + if rf, ok := ret.Get(0).(func(context.Context, models.FileID) []*models.Scene); ok { + r0 = rf(ctx, fileID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Scene) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FileID) error); ok { + r1 = rf(ctx, fileID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByFingerprints provides a mock function with given fields: ctx, fp +func (_m *SceneReaderWriter) FindByFingerprints(ctx context.Context, fp []models.Fingerprint) ([]*models.Scene, error) { + ret := _m.Called(ctx, fp) + + var r0 []*models.Scene + if rf, ok := ret.Get(0).(func(context.Context, []models.Fingerprint) []*models.Scene); ok { + r0 = rf(ctx, fp) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Scene) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []models.Fingerprint) error); ok { + r1 = rf(ctx, fp) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // FindByGalleryID provides a mock function with given fields: ctx, performerID func 
(_m *SceneReaderWriter) FindByGalleryID(ctx context.Context, performerID int) ([]*models.Scene, error) { ret := _m.Called(ctx, performerID) @@ -417,6 +524,29 @@ func (_m *SceneReaderWriter) FindByPerformerID(ctx context.Context, performerID return r0, r1 } +// FindByPrimaryFileID provides a mock function with given fields: ctx, fileID +func (_m *SceneReaderWriter) FindByPrimaryFileID(ctx context.Context, fileID models.FileID) ([]*models.Scene, error) { + ret := _m.Called(ctx, fileID) + + var r0 []*models.Scene + if rf, ok := ret.Get(0).(func(context.Context, models.FileID) []*models.Scene); ok { + r0 = rf(ctx, fileID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Scene) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FileID) error); ok { + r1 = rf(ctx, fileID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // FindDuplicates provides a mock function with given fields: ctx, distance, durationDiff func (_m *SceneReaderWriter) FindDuplicates(ctx context.Context, distance int, durationDiff float64) ([][]*models.Scene, error) { ret := _m.Called(ctx, distance, durationDiff) @@ -487,15 +617,15 @@ func (_m *SceneReaderWriter) GetCover(ctx context.Context, sceneID int) ([]byte, } // GetFiles provides a mock function with given fields: ctx, relatedID -func (_m *SceneReaderWriter) GetFiles(ctx context.Context, relatedID int) ([]*file.VideoFile, error) { +func (_m *SceneReaderWriter) GetFiles(ctx context.Context, relatedID int) ([]*models.VideoFile, error) { ret := _m.Called(ctx, relatedID) - var r0 []*file.VideoFile - if rf, ok := ret.Get(0).(func(context.Context, int) []*file.VideoFile); ok { + var r0 []*models.VideoFile + if rf, ok := ret.Get(0).(func(context.Context, int) []*models.VideoFile); ok { r0 = rf(ctx, relatedID) } else { if ret.Get(0) != nil { - r0 = ret.Get(0).([]*file.VideoFile) + r0 = ret.Get(0).([]*models.VideoFile) } } @@ -532,6 +662,29 @@ func (_m *SceneReaderWriter) GetGalleryIDs(ctx 
context.Context, relatedID int) ( return r0, r1 } +// GetManyFileIDs provides a mock function with given fields: ctx, ids +func (_m *SceneReaderWriter) GetManyFileIDs(ctx context.Context, ids []int) ([][]models.FileID, error) { + ret := _m.Called(ctx, ids) + + var r0 [][]models.FileID + if rf, ok := ret.Get(0).(func(context.Context, []int) [][]models.FileID); ok { + r0 = rf(ctx, ids) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([][]models.FileID) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []int) error); ok { + r1 = rf(ctx, ids) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // GetMovies provides a mock function with given fields: ctx, id func (_m *SceneReaderWriter) GetMovies(ctx context.Context, id int) ([]models.MoviesScenes, error) { ret := _m.Called(ctx, id) @@ -689,20 +842,20 @@ func (_m *SceneReaderWriter) IncrementOCounter(ctx context.Context, id int) (int return r0, r1 } -// IncrementWatchCount provides a mock function with given fields: ctx, id -func (_m *SceneReaderWriter) IncrementWatchCount(ctx context.Context, id int) (int, error) { - ret := _m.Called(ctx, id) +// IncrementWatchCount provides a mock function with given fields: ctx, sceneID +func (_m *SceneReaderWriter) IncrementWatchCount(ctx context.Context, sceneID int) (int, error) { + ret := _m.Called(ctx, sceneID) var r0 int if rf, ok := ret.Get(0).(func(context.Context, int) int); ok { - r0 = rf(ctx, id) + r0 = rf(ctx, sceneID) } else { r0 = ret.Get(0).(int) } var r1 error if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { - r1 = rf(ctx, id) + r1 = rf(ctx, sceneID) } else { r1 = ret.Error(1) } @@ -859,20 +1012,20 @@ func (_m *SceneReaderWriter) ResetOCounter(ctx context.Context, id int) (int, er return r0, r1 } -// SaveActivity provides a mock function with given fields: ctx, id, resumeTime, playDuration -func (_m *SceneReaderWriter) SaveActivity(ctx context.Context, id int, resumeTime *float64, playDuration *float64) (bool, error) 
{ - ret := _m.Called(ctx, id, resumeTime, playDuration) +// SaveActivity provides a mock function with given fields: ctx, sceneID, resumeTime, playDuration +func (_m *SceneReaderWriter) SaveActivity(ctx context.Context, sceneID int, resumeTime *float64, playDuration *float64) (bool, error) { + ret := _m.Called(ctx, sceneID, resumeTime, playDuration) var r0 bool if rf, ok := ret.Get(0).(func(context.Context, int, *float64, *float64) bool); ok { - r0 = rf(ctx, id, resumeTime, playDuration) + r0 = rf(ctx, sceneID, resumeTime, playDuration) } else { r0 = ret.Get(0).(bool) } var r1 error if rf, ok := ret.Get(1).(func(context.Context, int, *float64, *float64) error); ok { - r1 = rf(ctx, id, resumeTime, playDuration) + r1 = rf(ctx, sceneID, resumeTime, playDuration) } else { r1 = ret.Error(1) } diff --git a/pkg/models/mocks/StudioReaderWriter.go b/pkg/models/mocks/StudioReaderWriter.go index 56fd6200db7..c46e45d4c24 100644 --- a/pkg/models/mocks/StudioReaderWriter.go +++ b/pkg/models/mocks/StudioReaderWriter.go @@ -58,13 +58,13 @@ func (_m *StudioReaderWriter) Count(ctx context.Context) (int, error) { return r0, r1 } -// Create provides a mock function with given fields: ctx, input -func (_m *StudioReaderWriter) Create(ctx context.Context, input *models.Studio) error { - ret := _m.Called(ctx, input) +// Create provides a mock function with given fields: ctx, newStudio +func (_m *StudioReaderWriter) Create(ctx context.Context, newStudio *models.Studio) error { + ret := _m.Called(ctx, newStudio) var r0 error if rf, ok := ret.Get(0).(func(context.Context, *models.Studio) error); ok { - r0 = rf(ctx, input) + r0 = rf(ctx, newStudio) } else { r0 = ret.Error(0) } @@ -132,6 +132,29 @@ func (_m *StudioReaderWriter) FindByName(ctx context.Context, name string, nocas return r0, r1 } +// FindBySceneID provides a mock function with given fields: ctx, sceneID +func (_m *StudioReaderWriter) FindBySceneID(ctx context.Context, sceneID int) (*models.Studio, error) { + ret := _m.Called(ctx, 
sceneID) + + var r0 *models.Studio + if rf, ok := ret.Get(0).(func(context.Context, int) *models.Studio); ok { + r0 = rf(ctx, sceneID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.Studio) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, sceneID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // FindByStashID provides a mock function with given fields: ctx, stashID func (_m *StudioReaderWriter) FindByStashID(ctx context.Context, stashID models.StashID) ([]*models.Studio, error) { ret := _m.Called(ctx, stashID) @@ -395,13 +418,13 @@ func (_m *StudioReaderWriter) UpdateImage(ctx context.Context, studioID int, ima return r0 } -// UpdatePartial provides a mock function with given fields: ctx, input -func (_m *StudioReaderWriter) UpdatePartial(ctx context.Context, input models.StudioPartial) (*models.Studio, error) { - ret := _m.Called(ctx, input) +// UpdatePartial provides a mock function with given fields: ctx, updatedStudio +func (_m *StudioReaderWriter) UpdatePartial(ctx context.Context, updatedStudio models.StudioPartial) (*models.Studio, error) { + ret := _m.Called(ctx, updatedStudio) var r0 *models.Studio if rf, ok := ret.Get(0).(func(context.Context, models.StudioPartial) *models.Studio); ok { - r0 = rf(ctx, input) + r0 = rf(ctx, updatedStudio) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*models.Studio) @@ -410,7 +433,7 @@ func (_m *StudioReaderWriter) UpdatePartial(ctx context.Context, input models.St var r1 error if rf, ok := ret.Get(1).(func(context.Context, models.StudioPartial) error); ok { - r1 = rf(ctx, input) + r1 = rf(ctx, updatedStudio) } else { r1 = ret.Error(1) } diff --git a/pkg/models/mocks/TagReaderWriter.go b/pkg/models/mocks/TagReaderWriter.go index b4553c3d755..9b610e49b6e 100644 --- a/pkg/models/mocks/TagReaderWriter.go +++ b/pkg/models/mocks/TagReaderWriter.go @@ -58,6 +58,48 @@ func (_m *TagReaderWriter) Count(ctx context.Context) (int, error) { return 
r0, r1 } +// CountByChildTagID provides a mock function with given fields: ctx, childID +func (_m *TagReaderWriter) CountByChildTagID(ctx context.Context, childID int) (int, error) { + ret := _m.Called(ctx, childID) + + var r0 int + if rf, ok := ret.Get(0).(func(context.Context, int) int); ok { + r0 = rf(ctx, childID) + } else { + r0 = ret.Get(0).(int) + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, childID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// CountByParentTagID provides a mock function with given fields: ctx, parentID +func (_m *TagReaderWriter) CountByParentTagID(ctx context.Context, parentID int) (int, error) { + ret := _m.Called(ctx, parentID) + + var r0 int + if rf, ok := ret.Get(0).(func(context.Context, int) int); ok { + r0 = rf(ctx, parentID) + } else { + r0 = ret.Get(0).(int) + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, parentID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // Create provides a mock function with given fields: ctx, newTag func (_m *TagReaderWriter) Create(ctx context.Context, newTag *models.Tag) error { ret := _m.Called(ctx, newTag) @@ -385,13 +427,13 @@ func (_m *TagReaderWriter) FindMany(ctx context.Context, ids []int) ([]*models.T return r0, r1 } -// GetAliases provides a mock function with given fields: ctx, tagID -func (_m *TagReaderWriter) GetAliases(ctx context.Context, tagID int) ([]string, error) { - ret := _m.Called(ctx, tagID) +// GetAliases provides a mock function with given fields: ctx, relatedID +func (_m *TagReaderWriter) GetAliases(ctx context.Context, relatedID int) ([]string, error) { + ret := _m.Called(ctx, relatedID) var r0 []string if rf, ok := ret.Get(0).(func(context.Context, int) []string); ok { - r0 = rf(ctx, tagID) + r0 = rf(ctx, relatedID) } else { if ret.Get(0) != nil { r0 = ret.Get(0).([]string) @@ -400,7 +442,7 @@ func (_m *TagReaderWriter) GetAliases(ctx 
context.Context, tagID int) ([]string, var r1 error if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { - r1 = rf(ctx, tagID) + r1 = rf(ctx, relatedID) } else { r1 = ret.Error(1) } diff --git a/pkg/models/mocks/query.go b/pkg/models/mocks/query.go index 346bd1e5557..dd35d0f868f 100644 --- a/pkg/models/mocks/query.go +++ b/pkg/models/mocks/query.go @@ -31,6 +31,10 @@ type imageResolver struct { images []*models.Image } +func (s *imageResolver) Find(ctx context.Context, id int) (*models.Image, error) { + panic("not implemented") +} + func (s *imageResolver) FindMany(ctx context.Context, ids []int) ([]*models.Image, error) { return s.images, nil } diff --git a/pkg/models/model_file.go b/pkg/models/model_file.go index 4e8ddbef8f6..b4ca2c3c1e8 100644 --- a/pkg/models/model_file.go +++ b/pkg/models/model_file.go @@ -1,9 +1,14 @@ package models import ( + "bytes" "fmt" "io" + "io/fs" + "math" + "net/http" "strconv" + "time" ) type HashAlgorithm string @@ -47,3 +52,244 @@ func (e *HashAlgorithm) UnmarshalGQL(v interface{}) error { func (e HashAlgorithm) MarshalGQL(w io.Writer) { fmt.Fprint(w, strconv.Quote(e.String())) } + +// FileID represents an ID of a file. +type FileID int32 + +func (i FileID) String() string { + return strconv.Itoa(int(i)) +} + +func (i *FileID) UnmarshalGQL(v interface{}) (err error) { + switch v := v.(type) { + case string: + var id int + id, err = strconv.Atoi(v) + *i = FileID(id) + return err + case int: + *i = FileID(v) + return nil + default: + return fmt.Errorf("%T is not an int", v) + } +} + +func (i FileID) MarshalGQL(w io.Writer) { + fmt.Fprint(w, strconv.Quote(i.String())) +} + +// DirEntry represents a file or directory in the file system. 
+type DirEntry struct { + ZipFileID *FileID `json:"zip_file_id"` + + // transient - not persisted + // only guaranteed to have id, path and basename set + ZipFile File + + ModTime time.Time `json:"mod_time"` +} + +func (e *DirEntry) info(fs FS, path string) (fs.FileInfo, error) { + if e.ZipFile != nil { + zipPath := e.ZipFile.Base().Path + zfs, err := fs.OpenZip(zipPath) + if err != nil { + return nil, err + } + defer zfs.Close() + fs = zfs + } + // else assume os file + + ret, err := fs.Lstat(path) + return ret, err +} + +// File represents a file in the file system. +type File interface { + Base() *BaseFile + SetFingerprints(fp Fingerprints) + Open(fs FS) (io.ReadCloser, error) +} + +// BaseFile represents a file in the file system. +type BaseFile struct { + ID FileID `json:"id"` + + DirEntry + + // resolved from parent folder and basename only - not stored in DB + Path string `json:"path"` + + Basename string `json:"basename"` + ParentFolderID FolderID `json:"parent_folder_id"` + + Fingerprints Fingerprints `json:"fingerprints"` + + Size int64 `json:"size"` + + CreatedAt time.Time `json:"created_at"` + UpdatedAt time.Time `json:"updated_at"` +} + +func (f *BaseFile) FingerprintSlice() []Fingerprint { + return f.Fingerprints +} + +// SetFingerprints sets the fingerprints of the file. +// If a fingerprint of the same type already exists, it is overwritten. +func (f *BaseFile) SetFingerprints(fp Fingerprints) { + for _, v := range fp { + f.SetFingerprint(v) + } +} + +// SetFingerprint sets the fingerprint of the file. +// If a fingerprint of the same type already exists, it is overwritten. +func (f *BaseFile) SetFingerprint(fp Fingerprint) { + for i, existing := range f.Fingerprints { + if existing.Type == fp.Type { + f.Fingerprints[i] = fp + return + } + } + + f.Fingerprints = append(f.Fingerprints, fp) +} + +// Base is used to fulfil the File interface. 
+func (f *BaseFile) Base() *BaseFile { + return f +} + +func (f *BaseFile) Open(fs FS) (io.ReadCloser, error) { + if f.ZipFile != nil { + zipPath := f.ZipFile.Base().Path + zfs, err := fs.OpenZip(zipPath) + if err != nil { + return nil, err + } + + return zfs.OpenOnly(f.Path) + } + + return fs.Open(f.Path) +} + +func (f *BaseFile) Info(fs FS) (fs.FileInfo, error) { + return f.info(fs, f.Path) +} + +func (f *BaseFile) Serve(fs FS, w http.ResponseWriter, r *http.Request) error { + reader, err := f.Open(fs) + if err != nil { + return err + } + + defer reader.Close() + + content, ok := reader.(io.ReadSeeker) + if !ok { + data, err := io.ReadAll(reader) + if err != nil { + return err + } + content = bytes.NewReader(data) + } + + if r.URL.Query().Has("t") { + w.Header().Set("Cache-Control", "private, max-age=31536000, immutable") + } else { + w.Header().Set("Cache-Control", "no-cache") + } + http.ServeContent(w, r, f.Basename, f.ModTime, content) + + return nil +} + +// VisualFile is an interface for files that have a width and height. +type VisualFile interface { + File + GetWidth() int + GetHeight() int + GetFormat() string +} + +func GetMinResolution(f VisualFile) int { + w := f.GetWidth() + h := f.GetHeight() + + if w < h { + return w + } + + return h +} + +// ImageFile is an extension of BaseFile to represent image files. +type ImageFile struct { + *BaseFile + Format string `json:"format"` + Width int `json:"width"` + Height int `json:"height"` +} + +func (f ImageFile) GetWidth() int { + return f.Width +} + +func (f ImageFile) GetHeight() int { + return f.Height +} + +func (f ImageFile) GetFormat() string { + return f.Format +} + +// VideoFile is an extension of BaseFile to represent video files. 
+type VideoFile struct { + *BaseFile + Format string `json:"format"` + Width int `json:"width"` + Height int `json:"height"` + Duration float64 `json:"duration"` + VideoCodec string `json:"video_codec"` + AudioCodec string `json:"audio_codec"` + FrameRate float64 `json:"frame_rate"` + BitRate int64 `json:"bitrate"` + + Interactive bool `json:"interactive"` + InteractiveSpeed *int `json:"interactive_speed"` +} + +func (f VideoFile) GetWidth() int { + return f.Width +} + +func (f VideoFile) GetHeight() int { + return f.Height +} + +func (f VideoFile) GetFormat() string { + return f.Format +} + +// #1572 - Inf and NaN values cause the JSON marshaller to fail +// Replace these values with 0 rather than erroring + +func (f VideoFile) DurationFinite() float64 { + ret := f.Duration + if math.IsInf(ret, 0) || math.IsNaN(ret) { + return 0 + } + return ret +} + +func (f VideoFile) FrameRateFinite() float64 { + ret := f.FrameRate + if math.IsInf(ret, 0) || math.IsNaN(ret) { + return 0 + } + return ret +} diff --git a/pkg/models/model_folder.go b/pkg/models/model_folder.go new file mode 100644 index 00000000000..590cdd7bd30 --- /dev/null +++ b/pkg/models/model_folder.go @@ -0,0 +1,51 @@ +package models + +import ( + "fmt" + "io" + "io/fs" + "strconv" + "time" +) + +// FolderID represents an ID of a folder. +type FolderID int32 + +// String converts the ID to a string. +func (i FolderID) String() string { + return strconv.Itoa(int(i)) +} + +func (i *FolderID) UnmarshalGQL(v interface{}) (err error) { + switch v := v.(type) { + case string: + var id int + id, err = strconv.Atoi(v) + *i = FolderID(id) + return err + case int: + *i = FolderID(v) + return nil + default: + return fmt.Errorf("%T is not an int", v) + } +} + +func (i FolderID) MarshalGQL(w io.Writer) { + fmt.Fprint(w, strconv.Quote(i.String())) +} + +// Folder represents a folder in the file system. 
+type Folder struct { + ID FolderID `json:"id"` + DirEntry + Path string `json:"path"` + ParentFolderID *FolderID `json:"parent_folder_id"` + + CreatedAt time.Time `json:"created_at"` + UpdatedAt time.Time `json:"updated_at"` +} + +func (f *Folder) Info(fs FS) (fs.FileInfo, error) { + return f.info(fs, f.Path) +} diff --git a/pkg/models/model_gallery.go b/pkg/models/model_gallery.go index 932d5cd17bc..c7c74a017f3 100644 --- a/pkg/models/model_gallery.go +++ b/pkg/models/model_gallery.go @@ -5,8 +5,6 @@ import ( "path/filepath" "strconv" "time" - - "github.com/stashapp/stash/pkg/file" ) type Gallery struct { @@ -24,11 +22,11 @@ type Gallery struct { // transient - not persisted Files RelatedFiles // transient - not persisted - PrimaryFileID *file.ID + PrimaryFileID *FileID // transient - path of primary file or folder Path string - FolderID *file.FolderID `json:"folder_id"` + FolderID *FolderID `json:"folder_id"` CreatedAt time.Time `json:"created_at"` UpdatedAt time.Time `json:"updated_at"` @@ -38,6 +36,45 @@ type Gallery struct { PerformerIDs RelatedIDs `json:"performer_ids"` } +func NewGallery() Gallery { + currentTime := time.Now() + return Gallery{ + CreatedAt: currentTime, + UpdatedAt: currentTime, + } +} + +// GalleryPartial represents part of a Gallery object. It is used to update +// the database entry. Only non-nil fields will be updated. 
+type GalleryPartial struct { + // Path OptionalString + // Checksum OptionalString + // Zip OptionalBool + Title OptionalString + URL OptionalString + Date OptionalDate + Details OptionalString + // Rating expressed in 1-100 scale + Rating OptionalInt + Organized OptionalBool + StudioID OptionalInt + // FileModTime OptionalTime + CreatedAt OptionalTime + UpdatedAt OptionalTime + + SceneIDs *UpdateIDs + TagIDs *UpdateIDs + PerformerIDs *UpdateIDs + PrimaryFileID *FileID +} + +func NewGalleryPartial() GalleryPartial { + currentTime := time.Now() + return GalleryPartial{ + UpdatedAt: NewOptionalTime(currentTime), + } +} + // IsUserCreated returns true if the gallery was created by the user. // This is determined by whether the gallery has a primary file or folder. func (g *Gallery) IsUserCreated() bool { @@ -45,13 +82,13 @@ func (g *Gallery) IsUserCreated() bool { } func (g *Gallery) LoadFiles(ctx context.Context, l FileLoader) error { - return g.Files.load(func() ([]file.File, error) { + return g.Files.load(func() ([]File, error) { return l.GetFiles(ctx, g.ID) }) } -func (g *Gallery) LoadPrimaryFile(ctx context.Context, l file.Finder) error { - return g.Files.loadPrimary(func() (file.File, error) { +func (g *Gallery) LoadPrimaryFile(ctx context.Context, l FileGetter) error { + return g.Files.loadPrimary(func() (File, error) { if g.PrimaryFileID == nil { return nil, nil } @@ -89,7 +126,7 @@ func (g *Gallery) LoadTagIDs(ctx context.Context, l TagIDLoader) error { func (g Gallery) PrimaryChecksum() string { // renamed from Checksum to prevent gqlgen from using it in the resolver if p := g.Files.Primary(); p != nil { - v := p.Base().Fingerprints.Get(file.FingerprintTypeMD5) + v := p.Base().Fingerprints.Get(FingerprintTypeMD5) if v == nil { return "" } @@ -99,37 +136,6 @@ func (g Gallery) PrimaryChecksum() string { return "" } -// GalleryPartial represents part of a Gallery object. It is used to update -// the database entry. Only non-nil fields will be updated. 
-type GalleryPartial struct { - // Path OptionalString - // Checksum OptionalString - // Zip OptionalBool - Title OptionalString - URL OptionalString - Date OptionalDate - Details OptionalString - // Rating expressed in 1-100 scale - Rating OptionalInt - Organized OptionalBool - StudioID OptionalInt - // FileModTime OptionalTime - CreatedAt OptionalTime - UpdatedAt OptionalTime - - SceneIDs *UpdateIDs - TagIDs *UpdateIDs - PerformerIDs *UpdateIDs - PrimaryFileID *file.ID -} - -func NewGalleryPartial() GalleryPartial { - updatedTime := time.Now() - return GalleryPartial{ - UpdatedAt: NewOptionalTime(updatedTime), - } -} - // GetTitle returns the title of the scene. If the Title field is empty, // then the base filename is returned. func (g Gallery) GetTitle() string { @@ -155,13 +161,3 @@ func (g Gallery) DisplayName() string { } const DefaultGthumbWidth int = 640 - -type Galleries []*Gallery - -func (g *Galleries) Append(o interface{}) { - *g = append(*g, o.(*Gallery)) -} - -func (g *Galleries) New() interface{} { - return &Gallery{} -} diff --git a/pkg/models/model_gallery_chapter.go b/pkg/models/model_gallery_chapter.go index 5c9fc05b2be..6e527106bdd 100644 --- a/pkg/models/model_gallery_chapter.go +++ b/pkg/models/model_gallery_chapter.go @@ -13,6 +13,14 @@ type GalleryChapter struct { UpdatedAt time.Time `json:"updated_at"` } +func NewGalleryChapter() GalleryChapter { + currentTime := time.Now() + return GalleryChapter{ + CreatedAt: currentTime, + UpdatedAt: currentTime, + } +} + // GalleryChapterPartial represents part of a GalleryChapter object. // It is used to update the database entry. 
type GalleryChapterPartial struct { @@ -24,8 +32,8 @@ type GalleryChapterPartial struct { } func NewGalleryChapterPartial() GalleryChapterPartial { - updatedTime := time.Now() + currentTime := time.Now() return GalleryChapterPartial{ - UpdatedAt: NewOptionalTime(updatedTime), + UpdatedAt: NewOptionalTime(currentTime), } } diff --git a/pkg/models/model_image.go b/pkg/models/model_image.go index e025ba0b174..8f3211dc7ac 100644 --- a/pkg/models/model_image.go +++ b/pkg/models/model_image.go @@ -5,8 +5,6 @@ import ( "path/filepath" "strconv" "time" - - "github.com/stashapp/stash/pkg/file" ) // Image stores the metadata for a single image. @@ -15,16 +13,16 @@ type Image struct { Title string `json:"title"` // Rating expressed in 1-100 scale - Rating *int `json:"rating"` - Organized bool `json:"organized"` - OCounter int `json:"o_counter"` - StudioID *int `json:"studio_id"` - URL string `json:"url"` - Date *Date `json:"date"` + Rating *int `json:"rating"` + Organized bool `json:"organized"` + OCounter int `json:"o_counter"` + StudioID *int `json:"studio_id"` + URLs RelatedStrings `json:"urls"` + Date *Date `json:"date"` // transient - not persisted Files RelatedFiles - PrimaryFileID *file.ID + PrimaryFileID *FileID // transient - path of primary file - empty if no files Path string // transient - checksum of primary file - empty if no files @@ -38,14 +36,53 @@ type Image struct { PerformerIDs RelatedIDs `json:"performer_ids"` } +func NewImage() Image { + currentTime := time.Now() + return Image{ + CreatedAt: currentTime, + UpdatedAt: currentTime, + } +} + +type ImagePartial struct { + Title OptionalString + // Rating expressed in 1-100 scale + Rating OptionalInt + URLs *UpdateStrings + Date OptionalDate + Organized OptionalBool + OCounter OptionalInt + StudioID OptionalInt + CreatedAt OptionalTime + UpdatedAt OptionalTime + + GalleryIDs *UpdateIDs + TagIDs *UpdateIDs + PerformerIDs *UpdateIDs + PrimaryFileID *FileID +} + +func NewImagePartial() ImagePartial { + 
currentTime := time.Now() + return ImagePartial{ + UpdatedAt: NewOptionalTime(currentTime), + } +} + +func (i *Image) LoadURLs(ctx context.Context, l URLLoader) error { + return i.URLs.load(func() ([]string, error) { + return l.GetURLs(ctx, i.ID) + }) +} + func (i *Image) LoadFiles(ctx context.Context, l FileLoader) error { - return i.Files.load(func() ([]file.File, error) { + return i.Files.load(func() ([]File, error) { return l.GetFiles(ctx, i.ID) }) } -func (i *Image) LoadPrimaryFile(ctx context.Context, l file.Finder) error { - return i.Files.loadPrimary(func() (file.File, error) { +func (i *Image) LoadPrimaryFile(ctx context.Context, l FileGetter) error { + return i.Files.loadPrimary(func() (File, error) { if i.PrimaryFileID == nil { return nil, nil } @@ -104,43 +141,3 @@ func (i Image) DisplayName() string { return strconv.Itoa(i.ID) } - -type ImageCreateInput struct { - *Image - FileIDs []file.ID -} - -type ImagePartial struct { - Title OptionalString - // Rating expressed in 1-100 scale - Rating OptionalInt - URL OptionalString - Date OptionalDate - Organized OptionalBool - OCounter OptionalInt - StudioID OptionalInt - CreatedAt OptionalTime - UpdatedAt OptionalTime - - GalleryIDs *UpdateIDs - TagIDs *UpdateIDs - PerformerIDs *UpdateIDs - PrimaryFileID *file.ID -} - -func NewImagePartial() ImagePartial { - updatedTime := time.Now() - return ImagePartial{ - UpdatedAt: NewOptionalTime(updatedTime), - } -} - -type Images []*Image - -func (i *Images) Append(o interface{}) { - *i = append(*i, o.(*Image)) -} - -func (i *Images) New() interface{} { - return &Image{} -} diff --git a/pkg/models/model_joins.go b/pkg/models/model_joins.go index 5fe8b7fa5d9..da70293c3d3 100644 --- a/pkg/models/model_joins.go +++ b/pkg/models/model_joins.go @@ -11,8 +11,8 @@ type MoviesScenes struct { SceneIndex *int `json:"scene_index"` } -func (s MoviesScenes) SceneMovieInput() *SceneMovieInput { - return &SceneMovieInput{ +func (s MoviesScenes) SceneMovieInput() SceneMovieInput { + 
return SceneMovieInput{ MovieID: strconv.Itoa(s.MovieID), SceneIndex: s.SceneIndex, } @@ -28,12 +28,12 @@ type UpdateMovieIDs struct { Mode RelationshipUpdateMode `json:"mode"` } -func (u *UpdateMovieIDs) SceneMovieInputs() []*SceneMovieInput { +func (u *UpdateMovieIDs) SceneMovieInputs() []SceneMovieInput { if u == nil { return nil } - ret := make([]*SceneMovieInput, len(u.Movies)) + ret := make([]SceneMovieInput, len(u.Movies)) for _, id := range u.Movies { ret = append(ret, id.SceneMovieInput()) } @@ -51,21 +51,7 @@ func (u *UpdateMovieIDs) AddUnique(v MoviesScenes) { u.Movies = append(u.Movies, v) } -func UpdateMovieIDsFromInput(i []*SceneMovieInput) (*UpdateMovieIDs, error) { - ret := &UpdateMovieIDs{ - Mode: RelationshipUpdateModeSet, - } - - var err error - ret.Movies, err = MoviesScenesFromInput(i) - if err != nil { - return nil, err - } - - return ret, nil -} - -func MoviesScenesFromInput(input []*SceneMovieInput) ([]MoviesScenes, error) { +func MoviesScenesFromInput(input []SceneMovieInput) ([]MoviesScenes, error) { ret := make([]MoviesScenes, len(input)) for i, v := range input { diff --git a/pkg/models/model_movie.go b/pkg/models/model_movie.go index cf7f997d887..152f0d3bbb5 100644 --- a/pkg/models/model_movie.go +++ b/pkg/models/model_movie.go @@ -20,6 +20,14 @@ type Movie struct { UpdatedAt time.Time `json:"updated_at"` } +func NewMovie() Movie { + currentTime := time.Now() + return Movie{ + CreatedAt: currentTime, + UpdatedAt: currentTime, + } +} + type MoviePartial struct { Name OptionalString Aliases OptionalString @@ -35,30 +43,11 @@ type MoviePartial struct { UpdatedAt OptionalTime } -var DefaultMovieImage = 
"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAGQAAABkCAYAAABw4pVUAAAABmJLR0QA/wD/AP+gvaeTAAAACXBIWXMAAA3XAAAN1wFCKJt4AAAAB3RJTUUH4wgVBQsJl1CMZAAAASJJREFUeNrt3N0JwyAYhlEj3cj9R3Cm5rbkqtAP+qrnGaCYHPwJpLlaa++mmLpbAERAgAgIEAEBIiBABERAgAgIEAEBIiBABERAgAgIEAHZuVflj40x4i94zhk9vqsVvEq6AsQqMP1EjORx20OACAgQRRx7T+zzcFBxcjNDfoB4ntQqTm5Awo7MlqywZxcgYQ+RlqywJ3ozJAQCSBiEJSsQA0gYBpDAgAARECACAkRAgAgIEAERECACAmSjUv6eAOSB8m8YIGGzBUjYbAESBgMkbBkDEjZbgITBAClcxiqQvEoatreYIWEBASIgJ4Gkf11ntXH3nS9uxfGWfJ5J9hAgAgJEQAQEiIAAERAgAgJEQAQEiIAAERAgAgJEQAQEiL7qBuc6RKLHxr0CAAAAAElFTkSuQmCC" - -func NewMovie(name string) *Movie { - currentTime := time.Now() - return &Movie{ - Name: name, - CreatedAt: currentTime, - UpdatedAt: currentTime, - } -} - func NewMoviePartial() MoviePartial { - updatedTime := time.Now() + currentTime := time.Now() return MoviePartial{ - UpdatedAt: NewOptionalTime(updatedTime), + UpdatedAt: NewOptionalTime(currentTime), } } -type Movies []*Movie - -func (m *Movies) Append(o interface{}) { - *m = append(*m, o.(*Movie)) -} - -func (m *Movies) New() interface{} { - return &Movie{} -} +var DefaultMovieImage = "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAGQAAABkCAYAAABw4pVUAAAABmJLR0QA/wD/AP+gvaeTAAAACXBIWXMAAA3XAAAN1wFCKJt4AAAAB3RJTUUH4wgVBQsJl1CMZAAAASJJREFUeNrt3N0JwyAYhlEj3cj9R3Cm5rbkqtAP+qrnGaCYHPwJpLlaa++mmLpbAERAgAgIEAEBIiBABERAgAgIEAEBIiBABERAgAgIEAHZuVflj40x4i94zhk9vqsVvEq6AsQqMP1EjORx20OACAgQRRx7T+zzcFBxcjNDfoB4ntQqTm5Awo7MlqywZxcgYQ+RlqywJ3ozJAQCSBiEJSsQA0gYBpDAgAARECACAkRAgAgIEAERECACAmSjUv6eAOSB8m8YIGGzBUjYbAESBgMkbBkDEjZbgITBAClcxiqQvEoatreYIWEBASIgJ4Gkf11ntXH3nS9uxfGWfJ5J9hAgAgJEQAQEiIAAERAgAgJEQAQEiIAAERAgAgJEQAQEiL7qBuc6RKLHxr0CAAAAAElFTkSuQmCC" diff --git a/pkg/models/model_performer.go b/pkg/models/model_performer.go index a620f306516..09f92e13c6d 100644 --- a/pkg/models/model_performer.go +++ b/pkg/models/model_performer.go @@ -41,38 +41,12 @@ type Performer struct { StashIDs RelatedStashIDs `json:"stash_ids"` } -func (s *Performer) LoadAliases(ctx 
context.Context, l AliasLoader) error { - return s.Aliases.load(func() ([]string, error) { - return l.GetAliases(ctx, s.ID) - }) -} - -func (s *Performer) LoadTagIDs(ctx context.Context, l TagIDLoader) error { - return s.TagIDs.load(func() ([]int, error) { - return l.GetTagIDs(ctx, s.ID) - }) -} - -func (s *Performer) LoadStashIDs(ctx context.Context, l StashIDLoader) error { - return s.StashIDs.load(func() ([]StashID, error) { - return l.GetStashIDs(ctx, s.ID) - }) -} - -func (s *Performer) LoadRelationships(ctx context.Context, l PerformerReader) error { - if err := s.LoadAliases(ctx, l); err != nil { - return err - } - - if err := s.LoadTagIDs(ctx, l); err != nil { - return err - } - - if err := s.LoadStashIDs(ctx, l); err != nil { - return err +func NewPerformer() Performer { + currentTime := time.Now() + return Performer{ + CreatedAt: currentTime, + UpdatedAt: currentTime, } - - return nil } // PerformerPartial represents part of a Performer object. It is used to update @@ -112,28 +86,43 @@ type PerformerPartial struct { StashIDs *UpdateStashIDs } -func NewPerformer(name string) *Performer { +func NewPerformerPartial() PerformerPartial { currentTime := time.Now() - return &Performer{ - Name: name, - CreatedAt: currentTime, - UpdatedAt: currentTime, + return PerformerPartial{ + UpdatedAt: NewOptionalTime(currentTime), } } -func NewPerformerPartial() PerformerPartial { - updatedTime := time.Now() - return PerformerPartial{ - UpdatedAt: NewOptionalTime(updatedTime), - } +func (s *Performer) LoadAliases(ctx context.Context, l AliasLoader) error { + return s.Aliases.load(func() ([]string, error) { + return l.GetAliases(ctx, s.ID) + }) } -type Performers []*Performer +func (s *Performer) LoadTagIDs(ctx context.Context, l TagIDLoader) error { + return s.TagIDs.load(func() ([]int, error) { + return l.GetTagIDs(ctx, s.ID) + }) +} -func (p *Performers) Append(o interface{}) { - *p = append(*p, o.(*Performer)) +func (s *Performer) LoadStashIDs(ctx context.Context, l 
StashIDLoader) error { + return s.StashIDs.load(func() ([]StashID, error) { + return l.GetStashIDs(ctx, s.ID) + }) } -func (p *Performers) New() interface{} { - return &Performer{} +func (s *Performer) LoadRelationships(ctx context.Context, l PerformerReader) error { + if err := s.LoadAliases(ctx, l); err != nil { + return err + } + + if err := s.LoadTagIDs(ctx, l); err != nil { + return err + } + + if err := s.LoadStashIDs(ctx, l); err != nil { + return err + } + + return nil } diff --git a/pkg/models/model_saved_filter.go b/pkg/models/model_saved_filter.go index 23f06e2600e..d680e7c95ef 100644 --- a/pkg/models/model_saved_filter.go +++ b/pkg/models/model_saved_filter.go @@ -60,19 +60,10 @@ func (e FilterMode) MarshalGQL(w io.Writer) { } type SavedFilter struct { - ID int `json:"id"` - Mode FilterMode `json:"mode"` - Name string `json:"name"` - // JSON-encoded filter string - Filter string `json:"filter"` -} - -type SavedFilters []*SavedFilter - -func (m *SavedFilters) Append(o interface{}) { - *m = append(*m, o.(*SavedFilter)) -} - -func (m *SavedFilters) New() interface{} { - return &SavedFilter{} + ID int `db:"id" json:"id"` + Mode FilterMode `db:"mode" json:"mode"` + Name string `db:"name" json:"name"` + FindFilter *FindFilterType `json:"find_filter"` + ObjectFilter map[string]interface{} `json:"object_filter"` + UIOptions map[string]interface{} `json:"ui_options"` } diff --git a/pkg/models/model_scene.go b/pkg/models/model_scene.go index f19113f499a..4cd434eed80 100644 --- a/pkg/models/model_scene.go +++ b/pkg/models/model_scene.go @@ -6,8 +6,6 @@ import ( "path/filepath" "strconv" "time" - - "github.com/stashapp/stash/pkg/file" ) // Scene stores the metadata for a single video scene. 
@@ -26,7 +24,7 @@ type Scene struct { // transient - not persisted Files RelatedVideoFiles - PrimaryFileID *file.ID + PrimaryFileID *FileID // transient - path of primary file - empty if no files Path string // transient - oshash of primary file - empty if no files @@ -50,6 +48,50 @@ type Scene struct { StashIDs RelatedStashIDs `json:"stash_ids"` } +func NewScene() Scene { + currentTime := time.Now() + return Scene{ + CreatedAt: currentTime, + UpdatedAt: currentTime, + } +} + +// ScenePartial represents part of a Scene object. It is used to update +// the database entry. +type ScenePartial struct { + Title OptionalString + Code OptionalString + Details OptionalString + Director OptionalString + Date OptionalDate + // Rating expressed in 1-100 scale + Rating OptionalInt + Organized OptionalBool + OCounter OptionalInt + StudioID OptionalInt + CreatedAt OptionalTime + UpdatedAt OptionalTime + ResumeTime OptionalFloat64 + PlayDuration OptionalFloat64 + PlayCount OptionalInt + LastPlayedAt OptionalTime + + URLs *UpdateStrings + GalleryIDs *UpdateIDs + TagIDs *UpdateIDs + PerformerIDs *UpdateIDs + MovieIDs *UpdateMovieIDs + StashIDs *UpdateStashIDs + PrimaryFileID *FileID +} + +func NewScenePartial() ScenePartial { + currentTime := time.Now() + return ScenePartial{ + UpdatedAt: NewOptionalTime(currentTime), + } +} + func (s *Scene) LoadURLs(ctx context.Context, l URLLoader) error { return s.URLs.load(func() ([]string, error) { return l.GetURLs(ctx, s.ID) @@ -57,13 +99,13 @@ func (s *Scene) LoadURLs(ctx context.Context, l URLLoader) error { } func (s *Scene) LoadFiles(ctx context.Context, l VideoFileLoader) error { - return s.Files.load(func() ([]*file.VideoFile, error) { + return s.Files.load(func() ([]*VideoFile, error) { return l.GetFiles(ctx, s.ID) }) } -func (s *Scene) LoadPrimaryFile(ctx context.Context, l file.Finder) error { - return s.Files.loadPrimary(func() (*file.VideoFile, error) { +func (s *Scene) LoadPrimaryFile(ctx context.Context, l FileGetter) error { + 
return s.Files.loadPrimary(func() (*VideoFile, error) { if s.PrimaryFileID == nil { return nil, nil } @@ -73,10 +115,10 @@ func (s *Scene) LoadPrimaryFile(ctx context.Context, l file.Finder) error { return nil, err } - var vf *file.VideoFile + var vf *VideoFile if len(f) > 0 { var ok bool - vf, ok = f[0].(*file.VideoFile) + vf, ok = f[0].(*VideoFile) if !ok { return nil, errors.New("not a video file") } @@ -147,77 +189,6 @@ func (s *Scene) LoadRelationships(ctx context.Context, l SceneReader) error { return nil } -// ScenePartial represents part of a Scene object. It is used to update -// the database entry. -type ScenePartial struct { - Title OptionalString - Code OptionalString - Details OptionalString - Director OptionalString - Date OptionalDate - // Rating expressed in 1-100 scale - Rating OptionalInt - Organized OptionalBool - OCounter OptionalInt - StudioID OptionalInt - CreatedAt OptionalTime - UpdatedAt OptionalTime - ResumeTime OptionalFloat64 - PlayDuration OptionalFloat64 - PlayCount OptionalInt - LastPlayedAt OptionalTime - - URLs *UpdateStrings - GalleryIDs *UpdateIDs - TagIDs *UpdateIDs - PerformerIDs *UpdateIDs - MovieIDs *UpdateMovieIDs - StashIDs *UpdateStashIDs - PrimaryFileID *file.ID -} - -func NewScenePartial() ScenePartial { - updatedTime := time.Now() - return ScenePartial{ - UpdatedAt: NewOptionalTime(updatedTime), - } -} - -type SceneMovieInput struct { - MovieID string `json:"movie_id"` - SceneIndex *int `json:"scene_index"` -} - -type SceneUpdateInput struct { - ClientMutationID *string `json:"clientMutationId"` - ID string `json:"id"` - Title *string `json:"title"` - Code *string `json:"code"` - Details *string `json:"details"` - Director *string `json:"director"` - URL *string `json:"url"` - Date *string `json:"date"` - // Rating expressed in 1-5 scale - Rating *int `json:"rating"` - // Rating expressed in 1-100 scale - Rating100 *int `json:"rating100"` - OCounter *int `json:"o_counter"` - Organized *bool `json:"organized"` - Urls 
[]string `json:"urls"` - StudioID *string `json:"studio_id"` - GalleryIds []string `json:"gallery_ids"` - PerformerIds []string `json:"performer_ids"` - Movies []*SceneMovieInput `json:"movies"` - TagIds []string `json:"tag_ids"` - // This should be a URL or a base64 encoded data URL - CoverImage *string `json:"cover_image"` - StashIds []StashID `json:"stash_ids"` - ResumeTime *float64 `json:"resume_time"` - PlayDuration *float64 `json:"play_duration"` - PlayCount *int `json:"play_count"` - PrimaryFileID *string `json:"primary_file_id"` -} - // UpdateInput constructs a SceneUpdateInput using the populated fields in the ScenePartial object. func (s ScenePartial) UpdateInput(id int) SceneUpdateInput { var dateStr *string @@ -304,16 +275,6 @@ type SceneFileType struct { Bitrate *int `graphql:"bitrate" json:"bitrate"` } -type Scenes []*Scene - -func (s *Scenes) Append(o interface{}) { - *s = append(*s, o.(*Scene)) -} - -func (s *Scenes) New() interface{} { - return &Scene{} -} - type VideoCaption struct { LanguageCode string `json:"language_code"` Filename string `json:"filename"` diff --git a/pkg/models/model_scene_marker.go b/pkg/models/model_scene_marker.go index 1e9ac611589..df77afecd77 100644 --- a/pkg/models/model_scene_marker.go +++ b/pkg/models/model_scene_marker.go @@ -14,6 +14,14 @@ type SceneMarker struct { UpdatedAt time.Time `json:"updated_at"` } +func NewSceneMarker() SceneMarker { + currentTime := time.Now() + return SceneMarker{ + CreatedAt: currentTime, + UpdatedAt: currentTime, + } +} + // SceneMarkerPartial represents part of a SceneMarker object. // It is used to update the database entry. 
type SceneMarkerPartial struct { @@ -26,8 +34,8 @@ type SceneMarkerPartial struct { } func NewSceneMarkerPartial() SceneMarkerPartial { - updatedTime := time.Now() + currentTime := time.Now() return SceneMarkerPartial{ - UpdatedAt: NewOptionalTime(updatedTime), + UpdatedAt: NewOptionalTime(currentTime), } } diff --git a/pkg/models/model_scraped_item.go b/pkg/models/model_scraped_item.go index 97d403b10ef..cb383c082e7 100644 --- a/pkg/models/model_scraped_item.go +++ b/pkg/models/model_scraped_item.go @@ -3,7 +3,6 @@ package models import ( "context" "strconv" - "time" "github.com/stashapp/stash/pkg/sliceutil/stringslice" "github.com/stashapp/stash/pkg/utils" @@ -23,17 +22,12 @@ type ScrapedStudio struct { func (ScrapedStudio) IsScrapedContent() {} func (s *ScrapedStudio) ToStudio(endpoint string, excluded map[string]bool) *Studio { - now := time.Now() - // Populate a new studio from the input - newStudio := Studio{ - Name: s.Name, - CreatedAt: now, - UpdatedAt: now, - } + ret := NewStudio() + ret.Name = s.Name if s.RemoteSiteID != nil && endpoint != "" { - newStudio.StashIDs = NewRelatedStashIDs([]StashID{ + ret.StashIDs = NewRelatedStashIDs([]StashID{ { Endpoint: endpoint, StashID: *s.RemoteSiteID, @@ -42,15 +36,15 @@ func (s *ScrapedStudio) ToStudio(endpoint string, excluded map[string]bool) *Stu } if s.URL != nil && !excluded["url"] { - newStudio.URL = *s.URL + ret.URL = *s.URL } if s.Parent != nil && s.Parent.StoredID != nil && !excluded["parent"] && !excluded["parent_studio"] { parentId, _ := strconv.Atoi(*s.Parent.StoredID) - newStudio.ParentID = &parentId + ret.ParentID = &parentId } - return &newStudio + return &ret } func (s *ScrapedStudio) GetImage(ctx context.Context, excluded map[string]bool) ([]byte, error) { @@ -69,17 +63,15 @@ func (s *ScrapedStudio) GetImage(ctx context.Context, excluded map[string]bool) } func (s *ScrapedStudio) ToPartial(id *string, endpoint string, excluded map[string]bool, existingStashIDs []StashID) *StudioPartial { - partial 
:= StudioPartial{ - UpdatedAt: NewOptionalTime(time.Now()), - } - partial.ID, _ = strconv.Atoi(*id) + ret := NewStudioPartial() + ret.ID, _ = strconv.Atoi(*id) if s.Name != "" && !excluded["name"] { - partial.Name = NewOptionalString(s.Name) + ret.Name = NewOptionalString(s.Name) } if s.URL != nil && !excluded["url"] { - partial.URL = NewOptionalString(*s.URL) + ret.URL = NewOptionalString(*s.URL) } if s.Parent != nil && !excluded["parent"] { @@ -87,25 +79,25 @@ func (s *ScrapedStudio) ToPartial(id *string, endpoint string, excluded map[stri parentID, _ := strconv.Atoi(*s.Parent.StoredID) if parentID > 0 { // This is to be set directly as we know it has a value and the translator won't have the field - partial.ParentID = NewOptionalInt(parentID) + ret.ParentID = NewOptionalInt(parentID) } } } else { - partial.ParentID = NewOptionalIntPtr(nil) + ret.ParentID = NewOptionalIntPtr(nil) } if s.RemoteSiteID != nil && endpoint != "" { - partial.StashIDs = &UpdateStashIDs{ + ret.StashIDs = &UpdateStashIDs{ StashIDs: existingStashIDs, Mode: RelationshipUpdateModeSet, } - partial.StashIDs.Set(StashID{ + ret.StashIDs.Set(StashID{ Endpoint: endpoint, StashID: *s.RemoteSiteID, }) } - return &partial + return &ret } // A performer from a scraping operation... 
@@ -145,7 +137,8 @@ type ScrapedPerformer struct { func (ScrapedPerformer) IsScrapedContent() {} func (p *ScrapedPerformer) ToPerformer(endpoint string, excluded map[string]bool) *Performer { - ret := NewPerformer(*p.Name) + ret := NewPerformer() + ret.Name = *p.Name if p.Aliases != nil && !excluded["aliases"] { ret.Aliases = NewRelatedStrings(stringslice.FromString(*p.Aliases, ",")) @@ -244,7 +237,7 @@ func (p *ScrapedPerformer) ToPerformer(endpoint string, excluded map[string]bool }) } - return ret + return &ret } func (p *ScrapedPerformer) GetImage(ctx context.Context, excluded map[string]bool) ([]byte, error) { @@ -263,10 +256,10 @@ func (p *ScrapedPerformer) GetImage(ctx context.Context, excluded map[string]boo } func (p *ScrapedPerformer) ToPartial(endpoint string, excluded map[string]bool, existingStashIDs []StashID) PerformerPartial { - partial := NewPerformerPartial() + ret := NewPerformerPartial() if p.Aliases != nil && !excluded["aliases"] { - partial.Aliases = &UpdateStrings{ + ret.Aliases = &UpdateStrings{ Values: stringslice.FromString(*p.Aliases, ","), Mode: RelationshipUpdateModeSet, } @@ -274,88 +267,88 @@ func (p *ScrapedPerformer) ToPartial(endpoint string, excluded map[string]bool, if p.Birthdate != nil && !excluded["birthdate"] { date, err := ParseDate(*p.Birthdate) if err == nil { - partial.Birthdate = NewOptionalDate(date) + ret.Birthdate = NewOptionalDate(date) } } if p.DeathDate != nil && !excluded["death_date"] { date, err := ParseDate(*p.DeathDate) if err == nil { - partial.DeathDate = NewOptionalDate(date) + ret.DeathDate = NewOptionalDate(date) } } if p.CareerLength != nil && !excluded["career_length"] { - partial.CareerLength = NewOptionalString(*p.CareerLength) + ret.CareerLength = NewOptionalString(*p.CareerLength) } if p.Country != nil && !excluded["country"] { - partial.Country = NewOptionalString(*p.Country) + ret.Country = NewOptionalString(*p.Country) } if p.Ethnicity != nil && !excluded["ethnicity"] { - partial.Ethnicity = 
NewOptionalString(*p.Ethnicity) + ret.Ethnicity = NewOptionalString(*p.Ethnicity) } if p.EyeColor != nil && !excluded["eye_color"] { - partial.EyeColor = NewOptionalString(*p.EyeColor) + ret.EyeColor = NewOptionalString(*p.EyeColor) } if p.HairColor != nil && !excluded["hair_color"] { - partial.HairColor = NewOptionalString(*p.HairColor) + ret.HairColor = NewOptionalString(*p.HairColor) } if p.FakeTits != nil && !excluded["fake_tits"] { - partial.FakeTits = NewOptionalString(*p.FakeTits) + ret.FakeTits = NewOptionalString(*p.FakeTits) } if p.Gender != nil && !excluded["gender"] { - partial.Gender = NewOptionalString(*p.Gender) + ret.Gender = NewOptionalString(*p.Gender) } if p.Height != nil && !excluded["height"] { h, err := strconv.Atoi(*p.Height) if err == nil { - partial.Height = NewOptionalInt(h) + ret.Height = NewOptionalInt(h) } } if p.Weight != nil && !excluded["weight"] { w, err := strconv.Atoi(*p.Weight) if err == nil { - partial.Weight = NewOptionalInt(w) + ret.Weight = NewOptionalInt(w) } } if p.Instagram != nil && !excluded["instagram"] { - partial.Instagram = NewOptionalString(*p.Instagram) + ret.Instagram = NewOptionalString(*p.Instagram) } if p.Measurements != nil && !excluded["measurements"] { - partial.Measurements = NewOptionalString(*p.Measurements) + ret.Measurements = NewOptionalString(*p.Measurements) } if p.Name != nil && !excluded["name"] { - partial.Name = NewOptionalString(*p.Name) + ret.Name = NewOptionalString(*p.Name) } if p.Disambiguation != nil && !excluded["disambiguation"] { - partial.Disambiguation = NewOptionalString(*p.Disambiguation) + ret.Disambiguation = NewOptionalString(*p.Disambiguation) } if p.Details != nil && !excluded["details"] { - partial.Details = NewOptionalString(*p.Details) + ret.Details = NewOptionalString(*p.Details) } if p.Piercings != nil && !excluded["piercings"] { - partial.Piercings = NewOptionalString(*p.Piercings) + ret.Piercings = NewOptionalString(*p.Piercings) } if p.Tattoos != nil && 
!excluded["tattoos"] { - partial.Tattoos = NewOptionalString(*p.Tattoos) + ret.Tattoos = NewOptionalString(*p.Tattoos) } if p.Twitter != nil && !excluded["twitter"] { - partial.Twitter = NewOptionalString(*p.Twitter) + ret.Twitter = NewOptionalString(*p.Twitter) } if p.URL != nil && !excluded["url"] { - partial.URL = NewOptionalString(*p.URL) + ret.URL = NewOptionalString(*p.URL) } if p.RemoteSiteID != nil && endpoint != "" { - partial.StashIDs = &UpdateStashIDs{ + ret.StashIDs = &UpdateStashIDs{ StashIDs: existingStashIDs, Mode: RelationshipUpdateModeSet, } - partial.StashIDs.Set(StashID{ + ret.StashIDs.Set(StashID{ Endpoint: endpoint, StashID: *p.RemoteSiteID, }) } - return partial + return ret } type ScrapedTag struct { diff --git a/pkg/models/model_studio.go b/pkg/models/model_studio.go index 9f1deca4974..109535be1b5 100644 --- a/pkg/models/model_studio.go +++ b/pkg/models/model_studio.go @@ -21,28 +21,12 @@ type Studio struct { StashIDs RelatedStashIDs `json:"stash_ids"` } -func (s *Studio) LoadAliases(ctx context.Context, l AliasLoader) error { - return s.Aliases.load(func() ([]string, error) { - return l.GetAliases(ctx, s.ID) - }) -} - -func (s *Studio) LoadStashIDs(ctx context.Context, l StashIDLoader) error { - return s.StashIDs.load(func() ([]StashID, error) { - return l.GetStashIDs(ctx, s.ID) - }) -} - -func (s *Studio) LoadRelationships(ctx context.Context, l PerformerReader) error { - if err := s.LoadAliases(ctx, l); err != nil { - return err +func NewStudio() Studio { + currentTime := time.Now() + return Studio{ + CreatedAt: currentTime, + UpdatedAt: currentTime, } - - if err := s.LoadStashIDs(ctx, l); err != nil { - return err - } - - return nil } // StudioPartial represents part of a Studio object. It is used to update the database entry. 
@@ -62,12 +46,33 @@ type StudioPartial struct { StashIDs *UpdateStashIDs } -type Studios []*Studio +func NewStudioPartial() StudioPartial { + currentTime := time.Now() + return StudioPartial{ + UpdatedAt: NewOptionalTime(currentTime), + } +} -func (s *Studios) Append(o interface{}) { - *s = append(*s, o.(*Studio)) +func (s *Studio) LoadAliases(ctx context.Context, l AliasLoader) error { + return s.Aliases.load(func() ([]string, error) { + return l.GetAliases(ctx, s.ID) + }) +} + +func (s *Studio) LoadStashIDs(ctx context.Context, l StashIDLoader) error { + return s.StashIDs.load(func() ([]StashID, error) { + return l.GetStashIDs(ctx, s.ID) + }) } -func (s *Studios) New() interface{} { - return &Studio{} +func (s *Studio) LoadRelationships(ctx context.Context, l PerformerReader) error { + if err := s.LoadAliases(ctx, l); err != nil { + return err + } + + if err := s.LoadStashIDs(ctx, l); err != nil { + return err + } + + return nil } diff --git a/pkg/models/model_tag.go b/pkg/models/model_tag.go index e07eee77287..f8c49c5321f 100644 --- a/pkg/models/model_tag.go +++ b/pkg/models/model_tag.go @@ -13,6 +13,14 @@ type Tag struct { UpdatedAt time.Time `json:"updated_at"` } +func NewTag() Tag { + currentTime := time.Now() + return Tag{ + CreatedAt: currentTime, + UpdatedAt: currentTime, + } +} + type TagPartial struct { Name OptionalString Description OptionalString @@ -21,43 +29,14 @@ type TagPartial struct { UpdatedAt OptionalTime } -type TagPath struct { - Tag - Path string `json:"path"` -} - -func NewTag(name string) *Tag { - currentTime := time.Now() - return &Tag{ - Name: name, - CreatedAt: currentTime, - UpdatedAt: currentTime, - } -} - func NewTagPartial() TagPartial { - updatedTime := time.Now() + currentTime := time.Now() return TagPartial{ - UpdatedAt: NewOptionalTime(updatedTime), + UpdatedAt: NewOptionalTime(currentTime), } } -type Tags []*Tag - -func (t *Tags) Append(o interface{}) { - *t = append(*t, o.(*Tag)) -} - -func (t *Tags) New() interface{} { - 
return &Tag{} -} - -type TagPaths []*TagPath - -func (t *TagPaths) Append(o interface{}) { - *t = append(*t, o.(*TagPath)) -} - -func (t *TagPaths) New() interface{} { - return &TagPath{} +type TagPath struct { + Tag + Path string `json:"path"` } diff --git a/pkg/models/movie.go b/pkg/models/movie.go index d00b3f49106..be7effad376 100644 --- a/pkg/models/movie.go +++ b/pkg/models/movie.go @@ -1,7 +1,5 @@ package models -import "context" - type MovieFilterType struct { Name *StringCriterionInput `json:"name"` Director *StringCriterionInput `json:"director"` @@ -27,37 +25,3 @@ type MovieFilterType struct { // Filter by updated at UpdatedAt *TimestampCriterionInput `json:"updated_at"` } - -type MovieReader interface { - Find(ctx context.Context, id int) (*Movie, error) - FindMany(ctx context.Context, ids []int) ([]*Movie, error) - // FindBySceneID(sceneID int) ([]*Movie, error) - FindByName(ctx context.Context, name string, nocase bool) (*Movie, error) - FindByNames(ctx context.Context, names []string, nocase bool) ([]*Movie, error) - All(ctx context.Context) ([]*Movie, error) - Count(ctx context.Context) (int, error) - Query(ctx context.Context, movieFilter *MovieFilterType, findFilter *FindFilterType) ([]*Movie, int, error) - QueryCount(ctx context.Context, movieFilter *MovieFilterType, findFilter *FindFilterType) (int, error) - GetFrontImage(ctx context.Context, movieID int) ([]byte, error) - HasFrontImage(ctx context.Context, movieID int) (bool, error) - GetBackImage(ctx context.Context, movieID int) ([]byte, error) - HasBackImage(ctx context.Context, movieID int) (bool, error) - FindByPerformerID(ctx context.Context, performerID int) ([]*Movie, error) - CountByPerformerID(ctx context.Context, performerID int) (int, error) - FindByStudioID(ctx context.Context, studioID int) ([]*Movie, error) - CountByStudioID(ctx context.Context, studioID int) (int, error) -} - -type MovieWriter interface { - Create(ctx context.Context, newMovie *Movie) error - UpdatePartial(ctx 
context.Context, id int, updatedMovie MoviePartial) (*Movie, error) - Update(ctx context.Context, updatedMovie *Movie) error - Destroy(ctx context.Context, id int) error - UpdateFrontImage(ctx context.Context, movieID int, frontImage []byte) error - UpdateBackImage(ctx context.Context, movieID int, backImage []byte) error -} - -type MovieReaderWriter interface { - MovieReader - MovieWriter -} diff --git a/pkg/models/performer.go b/pkg/models/performer.go index 78d0a8995d0..3097c0ebf3b 100644 --- a/pkg/models/performer.go +++ b/pkg/models/performer.go @@ -1,7 +1,6 @@ package models import ( - "context" "fmt" "io" "strconv" @@ -194,43 +193,75 @@ type PerformerFilterType struct { UpdatedAt *TimestampCriterionInput `json:"updated_at"` } -type PerformerFinder interface { - FindMany(ctx context.Context, ids []int) ([]*Performer, error) +type PerformerCreateInput struct { + Name string `json:"name"` + Disambiguation *string `json:"disambiguation"` + URL *string `json:"url"` + Gender *GenderEnum `json:"gender"` + Birthdate *string `json:"birthdate"` + Ethnicity *string `json:"ethnicity"` + Country *string `json:"country"` + EyeColor *string `json:"eye_color"` + Height *string `json:"height"` + HeightCm *int `json:"height_cm"` + Measurements *string `json:"measurements"` + FakeTits *string `json:"fake_tits"` + PenisLength *float64 `json:"penis_length"` + Circumcised *CircumisedEnum `json:"circumcised"` + CareerLength *string `json:"career_length"` + Tattoos *string `json:"tattoos"` + Piercings *string `json:"piercings"` + Aliases *string `json:"aliases"` + AliasList []string `json:"alias_list"` + Twitter *string `json:"twitter"` + Instagram *string `json:"instagram"` + Favorite *bool `json:"favorite"` + TagIds []string `json:"tag_ids"` + // This should be a URL or a base64 encoded data URL + Image *string `json:"image"` + StashIds []StashID `json:"stash_ids"` + Rating *int `json:"rating"` + Rating100 *int `json:"rating100"` + Details *string `json:"details"` + DeathDate 
*string `json:"death_date"` + HairColor *string `json:"hair_color"` + Weight *int `json:"weight"` + IgnoreAutoTag *bool `json:"ignore_auto_tag"` } -type PerformerReader interface { - Find(ctx context.Context, id int) (*Performer, error) - PerformerFinder - FindBySceneID(ctx context.Context, sceneID int) ([]*Performer, error) - FindByImageID(ctx context.Context, imageID int) ([]*Performer, error) - FindByGalleryID(ctx context.Context, galleryID int) ([]*Performer, error) - FindByNames(ctx context.Context, names []string, nocase bool) ([]*Performer, error) - FindByStashID(ctx context.Context, stashID StashID) ([]*Performer, error) - FindByStashIDStatus(ctx context.Context, hasStashID bool, stashboxEndpoint string) ([]*Performer, error) - CountByTagID(ctx context.Context, tagID int) (int, error) - Count(ctx context.Context) (int, error) - All(ctx context.Context) ([]*Performer, error) - // TODO - this interface is temporary until the filter schema can fully - // support the query needed - QueryForAutoTag(ctx context.Context, words []string) ([]*Performer, error) - Query(ctx context.Context, performerFilter *PerformerFilterType, findFilter *FindFilterType) ([]*Performer, int, error) - QueryCount(ctx context.Context, galleryFilter *PerformerFilterType, findFilter *FindFilterType) (int, error) - AliasLoader - GetImage(ctx context.Context, performerID int) ([]byte, error) - HasImage(ctx context.Context, performerID int) (bool, error) - StashIDLoader - TagIDLoader -} - -type PerformerWriter interface { - Create(ctx context.Context, newPerformer *Performer) error - UpdatePartial(ctx context.Context, id int, updatedPerformer PerformerPartial) (*Performer, error) - Update(ctx context.Context, updatedPerformer *Performer) error - Destroy(ctx context.Context, id int) error - UpdateImage(ctx context.Context, performerID int, image []byte) error -} - -type PerformerReaderWriter interface { - PerformerReader - PerformerWriter +type PerformerUpdateInput struct { + ID string 
`json:"id"` + Name *string `json:"name"` + Disambiguation *string `json:"disambiguation"` + URL *string `json:"url"` + Gender *GenderEnum `json:"gender"` + Birthdate *string `json:"birthdate"` + Ethnicity *string `json:"ethnicity"` + Country *string `json:"country"` + EyeColor *string `json:"eye_color"` + Height *string `json:"height"` + HeightCm *int `json:"height_cm"` + Measurements *string `json:"measurements"` + FakeTits *string `json:"fake_tits"` + PenisLength *float64 `json:"penis_length"` + Circumcised *CircumisedEnum `json:"circumcised"` + CareerLength *string `json:"career_length"` + Tattoos *string `json:"tattoos"` + Piercings *string `json:"piercings"` + Aliases *string `json:"aliases"` + AliasList []string `json:"alias_list"` + Twitter *string `json:"twitter"` + Instagram *string `json:"instagram"` + Favorite *bool `json:"favorite"` + TagIds []string `json:"tag_ids"` + // This should be a URL or a base64 encoded data URL + Image *string `json:"image"` + StashIds []StashID `json:"stash_ids"` + Rating *int `json:"rating"` + Rating100 *int `json:"rating100"` + Details *string `json:"details"` + DeathDate *string `json:"death_date"` + HairColor *string `json:"hair_color"` + Weight *int `json:"weight"` + IgnoreAutoTag *bool `json:"ignore_auto_tag"` } diff --git a/pkg/models/relationships.go b/pkg/models/relationships.go index f59e7d92e06..2c2bc60b10b 100644 --- a/pkg/models/relationships.go +++ b/pkg/models/relationships.go @@ -1,15 +1,15 @@ package models -import ( - "context" - - "github.com/stashapp/stash/pkg/file" -) +import "context" type SceneIDLoader interface { GetSceneIDs(ctx context.Context, relatedID int) ([]int, error) } +type ImageIDLoader interface { + GetImageIDs(ctx context.Context, relatedID int) ([]int, error) +} + type GalleryIDLoader interface { GetGalleryIDs(ctx context.Context, relatedID int) ([]int, error) } @@ -22,6 +22,10 @@ type TagIDLoader interface { GetTagIDs(ctx context.Context, relatedID int) ([]int, error) } +type FileIDLoader 
interface { + GetManyFileIDs(ctx context.Context, ids []int) ([][]FileID, error) +} + type SceneMovieLoader interface { GetMovies(ctx context.Context, id int) ([]MoviesScenes, error) } @@ -31,11 +35,11 @@ type StashIDLoader interface { } type VideoFileLoader interface { - GetFiles(ctx context.Context, relatedID int) ([]*file.VideoFile, error) + GetFiles(ctx context.Context, relatedID int) ([]*VideoFile, error) } type FileLoader interface { - GetFiles(ctx context.Context, relatedID int) ([]file.File, error) + GetFiles(ctx context.Context, relatedID int) ([]File, error) } type AliasLoader interface { @@ -204,6 +208,19 @@ func (r RelatedStashIDs) List() []StashID { return r.list } +// ForID returns the StashID object for the given endpoint. Returns nil if not found. +func (r *RelatedStashIDs) ForEndpoint(endpoint string) *StashID { + r.mustLoaded() + + for _, v := range r.list { + if v.Endpoint == endpoint { + return &v + } + } + + return nil +} + func (r *RelatedStashIDs) load(fn func() ([]StashID, error)) error { if r.Loaded() { return nil @@ -224,12 +241,12 @@ func (r *RelatedStashIDs) load(fn func() ([]StashID, error)) error { } type RelatedVideoFiles struct { - primaryFile *file.VideoFile - files []*file.VideoFile + primaryFile *VideoFile + files []*VideoFile primaryLoaded bool } -func NewRelatedVideoFiles(files []*file.VideoFile) RelatedVideoFiles { +func NewRelatedVideoFiles(files []*VideoFile) RelatedVideoFiles { ret := RelatedVideoFiles{ files: files, primaryLoaded: true, @@ -242,12 +259,12 @@ func NewRelatedVideoFiles(files []*file.VideoFile) RelatedVideoFiles { return ret } -func (r *RelatedVideoFiles) SetPrimary(f *file.VideoFile) { +func (r *RelatedVideoFiles) SetPrimary(f *VideoFile) { r.primaryFile = f r.primaryLoaded = true } -func (r *RelatedVideoFiles) Set(f []*file.VideoFile) { +func (r *RelatedVideoFiles) Set(f []*VideoFile) { r.files = f if len(r.files) > 0 { r.primaryFile = r.files[0] @@ -267,7 +284,7 @@ func (r RelatedVideoFiles) PrimaryLoaded() 
bool { } // List returns the related files. Panics if the relationship has not been loaded. -func (r RelatedVideoFiles) List() []*file.VideoFile { +func (r RelatedVideoFiles) List() []*VideoFile { if !r.Loaded() { panic("relationship has not been loaded") } @@ -276,7 +293,7 @@ func (r RelatedVideoFiles) List() []*file.VideoFile { } // Primary returns the primary file. Panics if the relationship has not been loaded. -func (r RelatedVideoFiles) Primary() *file.VideoFile { +func (r RelatedVideoFiles) Primary() *VideoFile { if !r.PrimaryLoaded() { panic("relationship has not been loaded") } @@ -284,7 +301,7 @@ func (r RelatedVideoFiles) Primary() *file.VideoFile { return r.primaryFile } -func (r *RelatedVideoFiles) load(fn func() ([]*file.VideoFile, error)) error { +func (r *RelatedVideoFiles) load(fn func() ([]*VideoFile, error)) error { if r.Loaded() { return nil } @@ -304,7 +321,7 @@ func (r *RelatedVideoFiles) load(fn func() ([]*file.VideoFile, error)) error { return nil } -func (r *RelatedVideoFiles) loadPrimary(fn func() (*file.VideoFile, error)) error { +func (r *RelatedVideoFiles) loadPrimary(fn func() (*VideoFile, error)) error { if r.PrimaryLoaded() { return nil } @@ -321,12 +338,12 @@ func (r *RelatedVideoFiles) loadPrimary(fn func() (*file.VideoFile, error)) erro } type RelatedFiles struct { - primaryFile file.File - files []file.File + primaryFile File + files []File primaryLoaded bool } -func NewRelatedFiles(files []file.File) RelatedFiles { +func NewRelatedFiles(files []File) RelatedFiles { ret := RelatedFiles{ files: files, primaryLoaded: true, @@ -350,7 +367,7 @@ func (r RelatedFiles) PrimaryLoaded() bool { } // List returns the related files. Panics if the relationship has not been loaded. -func (r RelatedFiles) List() []file.File { +func (r RelatedFiles) List() []File { if !r.Loaded() { panic("relationship has not been loaded") } @@ -359,7 +376,7 @@ func (r RelatedFiles) List() []file.File { } // Primary returns the primary file. 
Panics if the relationship has not been loaded. -func (r RelatedFiles) Primary() file.File { +func (r RelatedFiles) Primary() File { if !r.PrimaryLoaded() { panic("relationship has not been loaded") } @@ -367,7 +384,7 @@ func (r RelatedFiles) Primary() file.File { return r.primaryFile } -func (r *RelatedFiles) load(fn func() ([]file.File, error)) error { +func (r *RelatedFiles) load(fn func() ([]File, error)) error { if r.Loaded() { return nil } @@ -387,7 +404,7 @@ func (r *RelatedFiles) load(fn func() ([]file.File, error)) error { return nil } -func (r *RelatedFiles) loadPrimary(fn func() (file.File, error)) error { +func (r *RelatedFiles) loadPrimary(fn func() (File, error)) error { if r.PrimaryLoaded() { return nil } diff --git a/pkg/models/repository.go b/pkg/models/repository.go index fe0e21dc004..9ba4eead11a 100644 --- a/pkg/models/repository.go +++ b/pkg/models/repository.go @@ -1,7 +1,6 @@ package models import ( - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/txn" ) @@ -14,8 +13,8 @@ type TxnManager interface { type Repository struct { TxnManager - File file.Store - Folder file.FolderStore + File FileReaderWriter + Folder FolderReaderWriter Gallery GalleryReaderWriter GalleryChapter GalleryChapterReaderWriter Image ImageReaderWriter diff --git a/pkg/models/repository_file.go b/pkg/models/repository_file.go new file mode 100644 index 00000000000..8ea9709db9b --- /dev/null +++ b/pkg/models/repository_file.go @@ -0,0 +1,88 @@ +package models + +import ( + "context" + "io/fs" +) + +// FileGetter provides methods to get files by ID. +type FileGetter interface { + Find(ctx context.Context, id ...FileID) ([]File, error) +} + +// FileFinder provides methods to find files. 
+type FileFinder interface { + FileGetter + FindAllByPath(ctx context.Context, path string) ([]File, error) + FindAllInPaths(ctx context.Context, p []string, limit, offset int) ([]File, error) + FindByPath(ctx context.Context, path string) (File, error) + FindByFingerprint(ctx context.Context, fp Fingerprint) ([]File, error) + FindByZipFileID(ctx context.Context, zipFileID FileID) ([]File, error) + FindByFileInfo(ctx context.Context, info fs.FileInfo, size int64) ([]File, error) +} + +// FileQueryer provides methods to query files. +type FileQueryer interface { + Query(ctx context.Context, options FileQueryOptions) (*FileQueryResult, error) +} + +// FileCounter provides methods to count files. +type FileCounter interface { + CountAllInPaths(ctx context.Context, p []string) (int, error) + CountByFolderID(ctx context.Context, folderID FolderID) (int, error) +} + +// FileCreator provides methods to create files. +type FileCreator interface { + Create(ctx context.Context, f File) error +} + +// FileUpdater provides methods to update files. +type FileUpdater interface { + Update(ctx context.Context, f File) error +} + +// FileDestroyer provides methods to destroy files. +type FileDestroyer interface { + Destroy(ctx context.Context, id FileID) error +} + +type FileFinderCreator interface { + FileFinder + FileCreator +} + +type FileFinderUpdater interface { + FileFinder + FileUpdater +} + +type FileFinderDestroyer interface { + FileFinder + FileDestroyer +} + +// FileReader provides all methods to read files. +type FileReader interface { + FileFinder + FileQueryer + FileCounter + + GetCaptions(ctx context.Context, fileID FileID) ([]*VideoCaption, error) + IsPrimary(ctx context.Context, fileID FileID) (bool, error) +} + +// FileWriter provides all methods to modify files. 
+type FileWriter interface { + FileCreator + FileUpdater + FileDestroyer + + UpdateCaptions(ctx context.Context, fileID FileID, captions []*VideoCaption) error +} + +// FileReaderWriter provides all file methods. +type FileReaderWriter interface { + FileReader + FileWriter +} diff --git a/pkg/models/repository_folder.go b/pkg/models/repository_folder.go new file mode 100644 index 00000000000..c3f82f52942 --- /dev/null +++ b/pkg/models/repository_folder.go @@ -0,0 +1,64 @@ +package models + +import "context" + +// FolderGetter provides methods to get folders by ID. +type FolderGetter interface { + Find(ctx context.Context, id FolderID) (*Folder, error) +} + +// FolderFinder provides methods to find folders. +type FolderFinder interface { + FolderGetter + FindAllInPaths(ctx context.Context, p []string, limit, offset int) ([]*Folder, error) + FindByPath(ctx context.Context, path string) (*Folder, error) + FindByZipFileID(ctx context.Context, zipFileID FileID) ([]*Folder, error) + FindByParentFolderID(ctx context.Context, parentFolderID FolderID) ([]*Folder, error) +} + +type FolderCounter interface { + CountAllInPaths(ctx context.Context, p []string) (int, error) +} + +// FolderCreator provides methods to create folders. +type FolderCreator interface { + Create(ctx context.Context, f *Folder) error +} + +// FolderUpdater provides methods to update folders. +type FolderUpdater interface { + Update(ctx context.Context, f *Folder) error +} + +type FolderDestroyer interface { + Destroy(ctx context.Context, id FolderID) error +} + +type FolderFinderCreator interface { + FolderFinder + FolderCreator +} + +type FolderFinderDestroyer interface { + FolderFinder + FolderDestroyer +} + +// FolderReader provides all methods to read folders. +type FolderReader interface { + FolderFinder + FolderCounter +} + +// FolderWriter provides all methods to modify folders. 
+type FolderWriter interface { + FolderCreator + FolderUpdater + FolderDestroyer +} + +// FolderReaderWriter provides all folder methods. +type FolderReaderWriter interface { + FolderReader + FolderWriter +} diff --git a/pkg/models/repository_gallery.go b/pkg/models/repository_gallery.go new file mode 100644 index 00000000000..64019886cc7 --- /dev/null +++ b/pkg/models/repository_gallery.go @@ -0,0 +1,91 @@ +package models + +import "context" + +// GalleryGetter provides methods to get galleries by ID. +type GalleryGetter interface { + // TODO - rename this to Find and remove existing method + FindMany(ctx context.Context, ids []int) ([]*Gallery, error) + Find(ctx context.Context, id int) (*Gallery, error) +} + +// GalleryFinder provides methods to find galleries. +type GalleryFinder interface { + GalleryGetter + FindByFingerprints(ctx context.Context, fp []Fingerprint) ([]*Gallery, error) + FindByChecksum(ctx context.Context, checksum string) ([]*Gallery, error) + FindByChecksums(ctx context.Context, checksums []string) ([]*Gallery, error) + FindByPath(ctx context.Context, path string) ([]*Gallery, error) + FindByFileID(ctx context.Context, fileID FileID) ([]*Gallery, error) + FindByFolderID(ctx context.Context, folderID FolderID) ([]*Gallery, error) + FindBySceneID(ctx context.Context, sceneID int) ([]*Gallery, error) + FindByImageID(ctx context.Context, imageID int) ([]*Gallery, error) + FindUserGalleryByTitle(ctx context.Context, title string) ([]*Gallery, error) +} + +// GalleryQueryer provides methods to query galleries. +type GalleryQueryer interface { + Query(ctx context.Context, galleryFilter *GalleryFilterType, findFilter *FindFilterType) ([]*Gallery, int, error) + QueryCount(ctx context.Context, galleryFilter *GalleryFilterType, findFilter *FindFilterType) (int, error) +} + +// GalleryCounter provides methods to count galleries. 
+type GalleryCounter interface { + Count(ctx context.Context) (int, error) + CountByFileID(ctx context.Context, fileID FileID) (int, error) +} + +// GalleryCreator provides methods to create galleries. +type GalleryCreator interface { + Create(ctx context.Context, newGallery *Gallery, fileIDs []FileID) error +} + +// GalleryUpdater provides methods to update galleries. +type GalleryUpdater interface { + Update(ctx context.Context, updatedGallery *Gallery) error + UpdatePartial(ctx context.Context, id int, updatedGallery GalleryPartial) (*Gallery, error) + UpdateImages(ctx context.Context, galleryID int, imageIDs []int) error +} + +// GalleryDestroyer provides methods to destroy galleries. +type GalleryDestroyer interface { + Destroy(ctx context.Context, id int) error +} + +type GalleryCreatorUpdater interface { + GalleryCreator + GalleryUpdater +} + +// GalleryReader provides all methods to read galleries. +type GalleryReader interface { + GalleryFinder + GalleryQueryer + GalleryCounter + + FileIDLoader + ImageIDLoader + SceneIDLoader + PerformerIDLoader + TagIDLoader + FileLoader + + All(ctx context.Context) ([]*Gallery, error) +} + +// GalleryWriter provides all methods to modify galleries. +type GalleryWriter interface { + GalleryCreator + GalleryUpdater + GalleryDestroyer + + AddFileID(ctx context.Context, id int, fileID FileID) error + AddImages(ctx context.Context, galleryID int, imageIDs ...int) error + RemoveImages(ctx context.Context, galleryID int, imageIDs ...int) error +} + +// GalleryReaderWriter provides all gallery methods. +type GalleryReaderWriter interface { + GalleryReader + GalleryWriter +} diff --git a/pkg/models/repository_gallery_chapter.go b/pkg/models/repository_gallery_chapter.go new file mode 100644 index 00000000000..5a926a0003f --- /dev/null +++ b/pkg/models/repository_gallery_chapter.go @@ -0,0 +1,55 @@ +package models + +import "context" + +// GalleryChapterGetter provides methods to get gallery chapters by ID. 
+type GalleryChapterGetter interface { + // TODO - rename this to Find and remove existing method + FindMany(ctx context.Context, ids []int) ([]*GalleryChapter, error) + Find(ctx context.Context, id int) (*GalleryChapter, error) +} + +// GalleryChapterFinder provides methods to find gallery chapters. +type GalleryChapterFinder interface { + GalleryChapterGetter + FindByGalleryID(ctx context.Context, galleryID int) ([]*GalleryChapter, error) +} + +// GalleryChapterCreator provides methods to create gallery chapters. +type GalleryChapterCreator interface { + Create(ctx context.Context, newGalleryChapter *GalleryChapter) error +} + +// GalleryChapterUpdater provides methods to update gallery chapters. +type GalleryChapterUpdater interface { + Update(ctx context.Context, updatedGalleryChapter *GalleryChapter) error + UpdatePartial(ctx context.Context, id int, updatedGalleryChapter GalleryChapterPartial) (*GalleryChapter, error) +} + +// GalleryChapterDestroyer provides methods to destroy gallery chapters. +type GalleryChapterDestroyer interface { + Destroy(ctx context.Context, id int) error +} + +type GalleryChapterCreatorUpdater interface { + GalleryChapterCreator + GalleryChapterUpdater +} + +// GalleryChapterReader provides all methods to read gallery chapters. +type GalleryChapterReader interface { + GalleryChapterFinder +} + +// GalleryChapterWriter provides all methods to modify gallery chapters. +type GalleryChapterWriter interface { + GalleryChapterCreator + GalleryChapterUpdater + GalleryChapterDestroyer +} + +// GalleryChapterReaderWriter provides all gallery chapter methods. +type GalleryChapterReaderWriter interface { + GalleryChapterReader + GalleryChapterWriter +} diff --git a/pkg/models/repository_image.go b/pkg/models/repository_image.go new file mode 100644 index 00000000000..1bf8ba440de --- /dev/null +++ b/pkg/models/repository_image.go @@ -0,0 +1,93 @@ +package models + +import "context" + +// ImageGetter provides methods to get images by ID. 
+type ImageGetter interface { + // TODO - rename this to Find and remove existing method + FindMany(ctx context.Context, ids []int) ([]*Image, error) + Find(ctx context.Context, id int) (*Image, error) +} + +// ImageFinder provides methods to find images. +type ImageFinder interface { + ImageGetter + FindByFingerprints(ctx context.Context, fp []Fingerprint) ([]*Image, error) + FindByChecksum(ctx context.Context, checksum string) ([]*Image, error) + FindByFileID(ctx context.Context, fileID FileID) ([]*Image, error) + FindByFolderID(ctx context.Context, fileID FolderID) ([]*Image, error) + FindByZipFileID(ctx context.Context, zipFileID FileID) ([]*Image, error) + FindByGalleryID(ctx context.Context, galleryID int) ([]*Image, error) +} + +// ImageQueryer provides methods to query images. +type ImageQueryer interface { + Query(ctx context.Context, options ImageQueryOptions) (*ImageQueryResult, error) + QueryCount(ctx context.Context, imageFilter *ImageFilterType, findFilter *FindFilterType) (int, error) +} + +// ImageCounter provides methods to count images. +type ImageCounter interface { + Count(ctx context.Context) (int, error) + CountByFileID(ctx context.Context, fileID FileID) (int, error) + CountByGalleryID(ctx context.Context, galleryID int) (int, error) + OCountByPerformerID(ctx context.Context, performerID int) (int, error) +} + +// ImageCreator provides methods to create images. +type ImageCreator interface { + Create(ctx context.Context, newImage *Image, fileIDs []FileID) error +} + +// ImageUpdater provides methods to update images. +type ImageUpdater interface { + Update(ctx context.Context, updatedImage *Image) error + UpdatePartial(ctx context.Context, id int, partial ImagePartial) (*Image, error) + UpdatePerformers(ctx context.Context, imageID int, performerIDs []int) error + UpdateTags(ctx context.Context, imageID int, tagIDs []int) error +} + +// ImageDestroyer provides methods to destroy images. 
+type ImageDestroyer interface { + Destroy(ctx context.Context, id int) error +} + +type ImageCreatorUpdater interface { + ImageCreator + ImageUpdater +} + +// ImageReader provides all methods to read images. +type ImageReader interface { + ImageFinder + ImageQueryer + ImageCounter + + URLLoader + FileIDLoader + GalleryIDLoader + PerformerIDLoader + TagIDLoader + FileLoader + + All(ctx context.Context) ([]*Image, error) + Size(ctx context.Context) (float64, error) +} + +// ImageWriter provides all methods to modify images. +type ImageWriter interface { + ImageCreator + ImageUpdater + ImageDestroyer + + AddFileID(ctx context.Context, id int, fileID FileID) error + IncrementOCounter(ctx context.Context, id int) (int, error) + DecrementOCounter(ctx context.Context, id int) (int, error) + ResetOCounter(ctx context.Context, id int) (int, error) +} + +// ImageReaderWriter provides all image methods. +type ImageReaderWriter interface { + ImageReader + ImageWriter +} diff --git a/pkg/models/repository_movie.go b/pkg/models/repository_movie.go new file mode 100644 index 00000000000..9234ea7a5d1 --- /dev/null +++ b/pkg/models/repository_movie.go @@ -0,0 +1,86 @@ +package models + +import "context" + +// MovieGetter provides methods to get movies by ID. +type MovieGetter interface { + // TODO - rename this to Find and remove existing method + FindMany(ctx context.Context, ids []int) ([]*Movie, error) + Find(ctx context.Context, id int) (*Movie, error) +} + +// MovieFinder provides methods to find movies. +type MovieFinder interface { + MovieGetter + FindByPerformerID(ctx context.Context, performerID int) ([]*Movie, error) + FindByStudioID(ctx context.Context, studioID int) ([]*Movie, error) + FindByName(ctx context.Context, name string, nocase bool) (*Movie, error) + FindByNames(ctx context.Context, names []string, nocase bool) ([]*Movie, error) +} + +// MovieQueryer provides methods to query movies. 
+type MovieQueryer interface { + Query(ctx context.Context, movieFilter *MovieFilterType, findFilter *FindFilterType) ([]*Movie, int, error) + QueryCount(ctx context.Context, movieFilter *MovieFilterType, findFilter *FindFilterType) (int, error) +} + +// MovieCounter provides methods to count movies. +type MovieCounter interface { + Count(ctx context.Context) (int, error) + CountByPerformerID(ctx context.Context, performerID int) (int, error) + CountByStudioID(ctx context.Context, studioID int) (int, error) +} + +// MovieCreator provides methods to create movies. +type MovieCreator interface { + Create(ctx context.Context, newMovie *Movie) error +} + +// MovieUpdater provides methods to update movies. +type MovieUpdater interface { + Update(ctx context.Context, updatedMovie *Movie) error + UpdatePartial(ctx context.Context, id int, updatedMovie MoviePartial) (*Movie, error) + UpdateFrontImage(ctx context.Context, movieID int, frontImage []byte) error + UpdateBackImage(ctx context.Context, movieID int, backImage []byte) error +} + +// MovieDestroyer provides methods to destroy movies. +type MovieDestroyer interface { + Destroy(ctx context.Context, id int) error +} + +type MovieCreatorUpdater interface { + MovieCreator + MovieUpdater +} + +type MovieFinderCreator interface { + MovieFinder + MovieCreator +} + +// MovieReader provides all methods to read movies. +type MovieReader interface { + MovieFinder + MovieQueryer + MovieCounter + + All(ctx context.Context) ([]*Movie, error) + GetFrontImage(ctx context.Context, movieID int) ([]byte, error) + HasFrontImage(ctx context.Context, movieID int) (bool, error) + GetBackImage(ctx context.Context, movieID int) ([]byte, error) + HasBackImage(ctx context.Context, movieID int) (bool, error) +} + +// MovieWriter provides all methods to modify movies. +type MovieWriter interface { + MovieCreator + MovieUpdater + MovieDestroyer +} + +// MovieReaderWriter provides all movie methods. 
+type MovieReaderWriter interface { + MovieReader + MovieWriter +} diff --git a/pkg/models/repository_performer.go b/pkg/models/repository_performer.go new file mode 100644 index 00000000000..aac7e0488e4 --- /dev/null +++ b/pkg/models/repository_performer.go @@ -0,0 +1,98 @@ +package models + +import "context" + +// PerformerGetter provides methods to get performers by ID. +type PerformerGetter interface { + // TODO - rename this to Find and remove existing method + FindMany(ctx context.Context, ids []int) ([]*Performer, error) + Find(ctx context.Context, id int) (*Performer, error) +} + +// PerformerFinder provides methods to find performers. +type PerformerFinder interface { + PerformerGetter + FindBySceneID(ctx context.Context, sceneID int) ([]*Performer, error) + FindByImageID(ctx context.Context, imageID int) ([]*Performer, error) + FindByGalleryID(ctx context.Context, galleryID int) ([]*Performer, error) + FindByStashID(ctx context.Context, stashID StashID) ([]*Performer, error) + FindByStashIDStatus(ctx context.Context, hasStashID bool, stashboxEndpoint string) ([]*Performer, error) + FindByNames(ctx context.Context, names []string, nocase bool) ([]*Performer, error) +} + +// PerformerQueryer provides methods to query performers. +type PerformerQueryer interface { + Query(ctx context.Context, performerFilter *PerformerFilterType, findFilter *FindFilterType) ([]*Performer, int, error) + QueryCount(ctx context.Context, galleryFilter *PerformerFilterType, findFilter *FindFilterType) (int, error) +} + +type PerformerAutoTagQueryer interface { + PerformerQueryer + AliasLoader + + // TODO - this interface is temporary until the filter schema can fully + // support the query needed + QueryForAutoTag(ctx context.Context, words []string) ([]*Performer, error) +} + +// PerformerCounter provides methods to count performers. 
+type PerformerCounter interface { + Count(ctx context.Context) (int, error) + CountByTagID(ctx context.Context, tagID int) (int, error) +} + +// PerformerCreator provides methods to create performers. +type PerformerCreator interface { + Create(ctx context.Context, newPerformer *Performer) error +} + +// PerformerUpdater provides methods to update performers. +type PerformerUpdater interface { + Update(ctx context.Context, updatedPerformer *Performer) error + UpdatePartial(ctx context.Context, id int, updatedPerformer PerformerPartial) (*Performer, error) + UpdateImage(ctx context.Context, performerID int, image []byte) error +} + +// PerformerDestroyer provides methods to destroy performers. +type PerformerDestroyer interface { + Destroy(ctx context.Context, id int) error +} + +type PerformerFinderCreator interface { + PerformerFinder + PerformerCreator +} + +type PerformerCreatorUpdater interface { + PerformerCreator + PerformerUpdater +} + +// PerformerReader provides all methods to read performers. +type PerformerReader interface { + PerformerFinder + PerformerQueryer + PerformerAutoTagQueryer + PerformerCounter + + AliasLoader + StashIDLoader + TagIDLoader + + All(ctx context.Context) ([]*Performer, error) + GetImage(ctx context.Context, performerID int) ([]byte, error) + HasImage(ctx context.Context, performerID int) (bool, error) +} + +// PerformerWriter provides all methods to modify performers. +type PerformerWriter interface { + PerformerCreator + PerformerUpdater + PerformerDestroyer +} + +// PerformerReaderWriter provides all performer methods. +type PerformerReaderWriter interface { + PerformerReader + PerformerWriter +} diff --git a/pkg/models/repository_scene.go b/pkg/models/repository_scene.go new file mode 100644 index 00000000000..fdd839ed6b3 --- /dev/null +++ b/pkg/models/repository_scene.go @@ -0,0 +1,115 @@ +package models + +import "context" + +// SceneGetter provides methods to get scenes by ID. 
+type SceneGetter interface { + // TODO - rename this to Find and remove existing method + FindMany(ctx context.Context, ids []int) ([]*Scene, error) + Find(ctx context.Context, id int) (*Scene, error) +} + +// SceneFinder provides methods to find scenes. +type SceneFinder interface { + SceneGetter + FindByFingerprints(ctx context.Context, fp []Fingerprint) ([]*Scene, error) + FindByChecksum(ctx context.Context, checksum string) ([]*Scene, error) + FindByOSHash(ctx context.Context, oshash string) ([]*Scene, error) + FindByPath(ctx context.Context, path string) ([]*Scene, error) + FindByFileID(ctx context.Context, fileID FileID) ([]*Scene, error) + FindByPrimaryFileID(ctx context.Context, fileID FileID) ([]*Scene, error) + FindByPerformerID(ctx context.Context, performerID int) ([]*Scene, error) + FindByGalleryID(ctx context.Context, performerID int) ([]*Scene, error) + FindByMovieID(ctx context.Context, movieID int) ([]*Scene, error) + FindDuplicates(ctx context.Context, distance int, durationDiff float64) ([][]*Scene, error) +} + +// SceneQueryer provides methods to query scenes. +type SceneQueryer interface { + Query(ctx context.Context, options SceneQueryOptions) (*SceneQueryResult, error) + QueryCount(ctx context.Context, sceneFilter *SceneFilterType, findFilter *FindFilterType) (int, error) +} + +// SceneCounter provides methods to count scenes. 
+type SceneCounter interface { + Count(ctx context.Context) (int, error) + CountByPerformerID(ctx context.Context, performerID int) (int, error) + CountByMovieID(ctx context.Context, movieID int) (int, error) + CountByFileID(ctx context.Context, fileID FileID) (int, error) + CountByStudioID(ctx context.Context, studioID int) (int, error) + CountByTagID(ctx context.Context, tagID int) (int, error) + CountMissingChecksum(ctx context.Context) (int, error) + CountMissingOSHash(ctx context.Context) (int, error) + OCount(ctx context.Context) (int, error) + OCountByPerformerID(ctx context.Context, performerID int) (int, error) + PlayCount(ctx context.Context) (int, error) + UniqueScenePlayCount(ctx context.Context) (int, error) +} + +// SceneCreator provides methods to create scenes. +type SceneCreator interface { + Create(ctx context.Context, newScene *Scene, fileIDs []FileID) error +} + +// SceneUpdater provides methods to update scenes. +type SceneUpdater interface { + Update(ctx context.Context, updatedScene *Scene) error + UpdatePartial(ctx context.Context, id int, updatedScene ScenePartial) (*Scene, error) + UpdateCover(ctx context.Context, sceneID int, cover []byte) error +} + +// SceneDestroyer provides methods to destroy scenes. +type SceneDestroyer interface { + Destroy(ctx context.Context, id int) error +} + +type SceneCreatorUpdater interface { + SceneCreator + SceneUpdater +} + +// SceneReader provides all methods to read scenes. 
+type SceneReader interface { + SceneFinder + SceneQueryer + SceneCounter + + URLLoader + FileIDLoader + GalleryIDLoader + PerformerIDLoader + TagIDLoader + SceneMovieLoader + StashIDLoader + VideoFileLoader + + All(ctx context.Context) ([]*Scene, error) + Wall(ctx context.Context, q *string) ([]*Scene, error) + Size(ctx context.Context) (float64, error) + Duration(ctx context.Context) (float64, error) + PlayDuration(ctx context.Context) (float64, error) + GetCover(ctx context.Context, sceneID int) ([]byte, error) + HasCover(ctx context.Context, sceneID int) (bool, error) +} + +// SceneWriter provides all methods to modify scenes. +type SceneWriter interface { + SceneCreator + SceneUpdater + SceneDestroyer + + AddFileID(ctx context.Context, id int, fileID FileID) error + AddGalleryIDs(ctx context.Context, sceneID int, galleryIDs []int) error + AssignFiles(ctx context.Context, sceneID int, fileID []FileID) error + IncrementOCounter(ctx context.Context, id int) (int, error) + DecrementOCounter(ctx context.Context, id int) (int, error) + ResetOCounter(ctx context.Context, id int) (int, error) + SaveActivity(ctx context.Context, sceneID int, resumeTime *float64, playDuration *float64) (bool, error) + IncrementWatchCount(ctx context.Context, sceneID int) (int, error) +} + +// SceneReaderWriter provides all scene methods. +type SceneReaderWriter interface { + SceneReader + SceneWriter +} diff --git a/pkg/models/repository_scene_marker.go b/pkg/models/repository_scene_marker.go new file mode 100644 index 00000000000..d35ec762f51 --- /dev/null +++ b/pkg/models/repository_scene_marker.go @@ -0,0 +1,76 @@ +package models + +import "context" + +// SceneMarkerGetter provides methods to get scene markers by ID. 
+type SceneMarkerGetter interface { + // TODO - rename this to Find and remove existing method + FindMany(ctx context.Context, ids []int) ([]*SceneMarker, error) + Find(ctx context.Context, id int) (*SceneMarker, error) +} + +// SceneMarkerFinder provides methods to find scene markers. +type SceneMarkerFinder interface { + SceneMarkerGetter + FindBySceneID(ctx context.Context, sceneID int) ([]*SceneMarker, error) +} + +// SceneMarkerQueryer provides methods to query scene markers. +type SceneMarkerQueryer interface { + Query(ctx context.Context, sceneMarkerFilter *SceneMarkerFilterType, findFilter *FindFilterType) ([]*SceneMarker, int, error) + QueryCount(ctx context.Context, sceneMarkerFilter *SceneMarkerFilterType, findFilter *FindFilterType) (int, error) +} + +// SceneMarkerCounter provides methods to count scene markers. +type SceneMarkerCounter interface { + Count(ctx context.Context) (int, error) + CountByTagID(ctx context.Context, tagID int) (int, error) +} + +// SceneMarkerCreator provides methods to create scene markers. +type SceneMarkerCreator interface { + Create(ctx context.Context, newSceneMarker *SceneMarker) error +} + +// SceneMarkerUpdater provides methods to update scene markers. +type SceneMarkerUpdater interface { + Update(ctx context.Context, updatedSceneMarker *SceneMarker) error + UpdatePartial(ctx context.Context, id int, updatedSceneMarker SceneMarkerPartial) (*SceneMarker, error) + UpdateTags(ctx context.Context, markerID int, tagIDs []int) error +} + +// SceneMarkerDestroyer provides methods to destroy scene markers. +type SceneMarkerDestroyer interface { + Destroy(ctx context.Context, id int) error +} + +type SceneMarkerCreatorUpdater interface { + SceneMarkerCreator + SceneMarkerUpdater +} + +// SceneMarkerReader provides all methods to read scene markers. 
+type SceneMarkerReader interface { + SceneMarkerFinder + SceneMarkerQueryer + SceneMarkerCounter + + TagIDLoader + + All(ctx context.Context) ([]*SceneMarker, error) + Wall(ctx context.Context, q *string) ([]*SceneMarker, error) + GetMarkerStrings(ctx context.Context, q *string, sort *string) ([]*MarkerStringsResultType, error) +} + +// SceneMarkerWriter provides all methods to modify scene markers. +type SceneMarkerWriter interface { + SceneMarkerCreator + SceneMarkerUpdater + SceneMarkerDestroyer +} + +// SceneMarkerReaderWriter provides all scene marker methods. +type SceneMarkerReaderWriter interface { + SceneMarkerReader + SceneMarkerWriter +} diff --git a/pkg/models/repository_studio.go b/pkg/models/repository_studio.go new file mode 100644 index 00000000000..272bf8fed23 --- /dev/null +++ b/pkg/models/repository_studio.go @@ -0,0 +1,94 @@ +package models + +import "context" + +// StudioGetter provides methods to get studios by ID. +type StudioGetter interface { + // TODO - rename this to Find and remove existing method + FindMany(ctx context.Context, ids []int) ([]*Studio, error) + Find(ctx context.Context, id int) (*Studio, error) +} + +// StudioFinder provides methods to find studios. +type StudioFinder interface { + StudioGetter + FindChildren(ctx context.Context, id int) ([]*Studio, error) + FindBySceneID(ctx context.Context, sceneID int) (*Studio, error) + FindByStashID(ctx context.Context, stashID StashID) ([]*Studio, error) + FindByStashIDStatus(ctx context.Context, hasStashID bool, stashboxEndpoint string) ([]*Studio, error) + FindByName(ctx context.Context, name string, nocase bool) (*Studio, error) +} + +// StudioQueryer provides methods to query studios. 
+type StudioQueryer interface { + Query(ctx context.Context, studioFilter *StudioFilterType, findFilter *FindFilterType) ([]*Studio, int, error) +} + +type StudioAutoTagQueryer interface { + StudioQueryer + AliasLoader + + // TODO - this interface is temporary until the filter schema can fully + // support the query needed + QueryForAutoTag(ctx context.Context, words []string) ([]*Studio, error) +} + +// StudioCounter provides methods to count studios. +type StudioCounter interface { + Count(ctx context.Context) (int, error) +} + +// StudioCreator provides methods to create studios. +type StudioCreator interface { + Create(ctx context.Context, newStudio *Studio) error +} + +// StudioUpdater provides methods to update studios. +type StudioUpdater interface { + Update(ctx context.Context, updatedStudio *Studio) error + UpdatePartial(ctx context.Context, updatedStudio StudioPartial) (*Studio, error) + UpdateImage(ctx context.Context, studioID int, image []byte) error +} + +// StudioDestroyer provides methods to destroy studios. +type StudioDestroyer interface { + Destroy(ctx context.Context, id int) error +} + +type StudioFinderCreator interface { + StudioFinder + StudioCreator +} + +type StudioCreatorUpdater interface { + StudioCreator + StudioUpdater +} + +// StudioReader provides all methods to read studios. +type StudioReader interface { + StudioFinder + StudioQueryer + StudioAutoTagQueryer + StudioCounter + + AliasLoader + StashIDLoader + + All(ctx context.Context) ([]*Studio, error) + GetImage(ctx context.Context, studioID int) ([]byte, error) + HasImage(ctx context.Context, studioID int) (bool, error) +} + +// StudioWriter provides all methods to modify studios. +type StudioWriter interface { + StudioCreator + StudioUpdater + StudioDestroyer +} + +// StudioReaderWriter provides all studio methods. 
+type StudioReaderWriter interface { + StudioReader + StudioWriter +} diff --git a/pkg/models/repository_tag.go b/pkg/models/repository_tag.go new file mode 100644 index 00000000000..ca8f6971bf7 --- /dev/null +++ b/pkg/models/repository_tag.go @@ -0,0 +1,106 @@ +package models + +import "context" + +// TagGetter provides methods to get tags by ID. +type TagGetter interface { + // TODO - rename this to Find and remove existing method + FindMany(ctx context.Context, ids []int) ([]*Tag, error) + Find(ctx context.Context, id int) (*Tag, error) +} + +// TagFinder provides methods to find tags. +type TagFinder interface { + TagGetter + FindAllAncestors(ctx context.Context, tagID int, excludeIDs []int) ([]*TagPath, error) + FindAllDescendants(ctx context.Context, tagID int, excludeIDs []int) ([]*TagPath, error) + FindByParentTagID(ctx context.Context, parentID int) ([]*Tag, error) + FindByChildTagID(ctx context.Context, childID int) ([]*Tag, error) + FindBySceneID(ctx context.Context, sceneID int) ([]*Tag, error) + FindByImageID(ctx context.Context, imageID int) ([]*Tag, error) + FindByGalleryID(ctx context.Context, galleryID int) ([]*Tag, error) + FindByPerformerID(ctx context.Context, performerID int) ([]*Tag, error) + FindBySceneMarkerID(ctx context.Context, sceneMarkerID int) ([]*Tag, error) + FindByName(ctx context.Context, name string, nocase bool) (*Tag, error) + FindByNames(ctx context.Context, names []string, nocase bool) ([]*Tag, error) +} + +// TagQueryer provides methods to query tags. +type TagQueryer interface { + Query(ctx context.Context, tagFilter *TagFilterType, findFilter *FindFilterType) ([]*Tag, int, error) +} + +type TagAutoTagQueryer interface { + TagQueryer + AliasLoader + + // TODO - this interface is temporary until the filter schema can fully + // support the query needed + QueryForAutoTag(ctx context.Context, words []string) ([]*Tag, error) +} + +// TagCounter provides methods to count tags. 
+type TagCounter interface { + Count(ctx context.Context) (int, error) + CountByParentTagID(ctx context.Context, parentID int) (int, error) + CountByChildTagID(ctx context.Context, childID int) (int, error) +} + +// TagCreator provides methods to create tags. +type TagCreator interface { + Create(ctx context.Context, newTag *Tag) error +} + +// TagUpdater provides methods to update tags. +type TagUpdater interface { + Update(ctx context.Context, updatedTag *Tag) error + UpdatePartial(ctx context.Context, id int, updateTag TagPartial) (*Tag, error) + UpdateAliases(ctx context.Context, tagID int, aliases []string) error + UpdateImage(ctx context.Context, tagID int, image []byte) error + UpdateParentTags(ctx context.Context, tagID int, parentIDs []int) error + UpdateChildTags(ctx context.Context, tagID int, parentIDs []int) error +} + +// TagDestroyer provides methods to destroy tags. +type TagDestroyer interface { + Destroy(ctx context.Context, id int) error +} + +type TagFinderCreator interface { + TagFinder + TagCreator +} + +type TagCreatorUpdater interface { + TagCreator + TagUpdater +} + +// TagReader provides all methods to read tags. +type TagReader interface { + TagFinder + TagQueryer + TagAutoTagQueryer + TagCounter + + AliasLoader + + All(ctx context.Context) ([]*Tag, error) + GetImage(ctx context.Context, tagID int) ([]byte, error) + HasImage(ctx context.Context, tagID int) (bool, error) +} + +// TagWriter provides all methods to modify tags. +type TagWriter interface { + TagCreator + TagUpdater + TagDestroyer + + Merge(ctx context.Context, source []int, destination int) error +} + +// TagReaderWriter provides all tag methods. 
+type TagReaderWriter interface { + TagReader + TagWriter +} diff --git a/pkg/models/scene.go b/pkg/models/scene.go index 8f8d2eaf420..09ac117ad9d 100644 --- a/pkg/models/scene.go +++ b/pkg/models/scene.go @@ -1,10 +1,6 @@ package models -import ( - "context" - - "github.com/stashapp/stash/pkg/file" -) +import "context" type PHashDuplicationCriterionInput struct { Duplicated *bool `json:"duplicated"` @@ -112,11 +108,69 @@ type SceneQueryResult struct { TotalDuration float64 TotalSize float64 - finder SceneFinder + getter SceneGetter scenes []*Scene resolveErr error } +type SceneMovieInput struct { + MovieID string `json:"movie_id"` + SceneIndex *int `json:"scene_index"` +} + +type SceneCreateInput struct { + Title *string `json:"title"` + Code *string `json:"code"` + Details *string `json:"details"` + Director *string `json:"director"` + URL *string `json:"url"` + Urls []string `json:"urls"` + Date *string `json:"date"` + Rating *int `json:"rating"` + Rating100 *int `json:"rating100"` + Organized *bool `json:"organized"` + StudioID *string `json:"studio_id"` + GalleryIds []string `json:"gallery_ids"` + PerformerIds []string `json:"performer_ids"` + Movies []SceneMovieInput `json:"movies"` + TagIds []string `json:"tag_ids"` + // This should be a URL or a base64 encoded data URL + CoverImage *string `json:"cover_image"` + StashIds []StashID `json:"stash_ids"` + // The first id will be assigned as primary. + // Files will be reassigned from existing scenes if applicable. + // Files must not already be primary for another scene. 
+ FileIds []string `json:"file_ids"` +} + +type SceneUpdateInput struct { + ClientMutationID *string `json:"clientMutationId"` + ID string `json:"id"` + Title *string `json:"title"` + Code *string `json:"code"` + Details *string `json:"details"` + Director *string `json:"director"` + URL *string `json:"url"` + Urls []string `json:"urls"` + Date *string `json:"date"` + Rating *int `json:"rating"` + Rating100 *int `json:"rating100"` + OCounter *int `json:"o_counter"` + Organized *bool `json:"organized"` + StudioID *string `json:"studio_id"` + GalleryIds []string `json:"gallery_ids"` + PerformerIds []string `json:"performer_ids"` + Movies []SceneMovieInput `json:"movies"` + TagIds []string `json:"tag_ids"` + // This should be a URL or a base64 encoded data URL + CoverImage *string `json:"cover_image"` + StashIds []StashID `json:"stash_ids"` + ResumeTime *float64 `json:"resume_time"` + PlayDuration *float64 `json:"play_duration"` + PlayCount *int `json:"play_count"` + PrimaryFileID *string `json:"primary_file_id"` +} + type SceneDestroyInput struct { ID string `json:"id"` DeleteFile *bool `json:"delete_file"` @@ -129,83 +183,16 @@ type ScenesDestroyInput struct { DeleteGenerated *bool `json:"delete_generated"` } -func NewSceneQueryResult(finder SceneFinder) *SceneQueryResult { +func NewSceneQueryResult(getter SceneGetter) *SceneQueryResult { return &SceneQueryResult{ - finder: finder, + getter: getter, } } func (r *SceneQueryResult) Resolve(ctx context.Context) ([]*Scene, error) { // cache results if r.scenes == nil && r.resolveErr == nil { - r.scenes, r.resolveErr = r.finder.FindMany(ctx, r.IDs) + r.scenes, r.resolveErr = r.getter.FindMany(ctx, r.IDs) } return r.scenes, r.resolveErr } - -type SceneFinder interface { - // TODO - rename this to Find and remove existing method - FindMany(ctx context.Context, ids []int) ([]*Scene, error) -} - -type SceneReader interface { - SceneFinder - // TODO - remove this in another PR - Find(ctx context.Context, id int) (*Scene, 
error) - FindByChecksum(ctx context.Context, checksum string) ([]*Scene, error) - FindByOSHash(ctx context.Context, oshash string) ([]*Scene, error) - FindByPath(ctx context.Context, path string) ([]*Scene, error) - FindByPerformerID(ctx context.Context, performerID int) ([]*Scene, error) - FindByGalleryID(ctx context.Context, performerID int) ([]*Scene, error) - FindDuplicates(ctx context.Context, distance int, durationDiff float64) ([][]*Scene, error) - - URLLoader - GalleryIDLoader - PerformerIDLoader - TagIDLoader - SceneMovieLoader - StashIDLoader - VideoFileLoader - - CountByPerformerID(ctx context.Context, performerID int) (int, error) - OCountByPerformerID(ctx context.Context, performerID int) (int, error) - OCount(ctx context.Context) (int, error) - // FindByStudioID(studioID int) ([]*Scene, error) - FindByMovieID(ctx context.Context, movieID int) ([]*Scene, error) - CountByMovieID(ctx context.Context, movieID int) (int, error) - Count(ctx context.Context) (int, error) - PlayCount(ctx context.Context) (int, error) - UniqueScenePlayCount(ctx context.Context) (int, error) - Size(ctx context.Context) (float64, error) - Duration(ctx context.Context) (float64, error) - PlayDuration(ctx context.Context) (float64, error) - // SizeCount() (string, error) - CountByStudioID(ctx context.Context, studioID int) (int, error) - CountByTagID(ctx context.Context, tagID int) (int, error) - CountMissingChecksum(ctx context.Context) (int, error) - CountMissingOSHash(ctx context.Context) (int, error) - Wall(ctx context.Context, q *string) ([]*Scene, error) - All(ctx context.Context) ([]*Scene, error) - Query(ctx context.Context, options SceneQueryOptions) (*SceneQueryResult, error) - QueryCount(ctx context.Context, sceneFilter *SceneFilterType, findFilter *FindFilterType) (int, error) - GetCover(ctx context.Context, sceneID int) ([]byte, error) - HasCover(ctx context.Context, sceneID int) (bool, error) -} - -type SceneWriter interface { - Create(ctx context.Context, newScene 
*Scene, fileIDs []file.ID) error - Update(ctx context.Context, updatedScene *Scene) error - UpdatePartial(ctx context.Context, id int, updatedScene ScenePartial) (*Scene, error) - IncrementOCounter(ctx context.Context, id int) (int, error) - DecrementOCounter(ctx context.Context, id int) (int, error) - ResetOCounter(ctx context.Context, id int) (int, error) - SaveActivity(ctx context.Context, id int, resumeTime *float64, playDuration *float64) (bool, error) - IncrementWatchCount(ctx context.Context, id int) (int, error) - Destroy(ctx context.Context, id int) error - UpdateCover(ctx context.Context, sceneID int, cover []byte) error -} - -type SceneReaderWriter interface { - SceneReader - SceneWriter -} diff --git a/pkg/models/scene_marker.go b/pkg/models/scene_marker.go index 673a547e975..4a10c0e2178 100644 --- a/pkg/models/scene_marker.go +++ b/pkg/models/scene_marker.go @@ -1,7 +1,5 @@ package models -import "context" - type SceneMarkerFilterType struct { // Filter to only include scene markers with this tag TagID *string `json:"tag_id"` @@ -28,30 +26,3 @@ type MarkerStringsResultType struct { ID string `json:"id"` Title string `json:"title"` } - -type SceneMarkerReader interface { - Find(ctx context.Context, id int) (*SceneMarker, error) - FindMany(ctx context.Context, ids []int) ([]*SceneMarker, error) - FindBySceneID(ctx context.Context, sceneID int) ([]*SceneMarker, error) - CountByTagID(ctx context.Context, tagID int) (int, error) - GetMarkerStrings(ctx context.Context, q *string, sort *string) ([]*MarkerStringsResultType, error) - Wall(ctx context.Context, q *string) ([]*SceneMarker, error) - Count(ctx context.Context) (int, error) - All(ctx context.Context) ([]*SceneMarker, error) - Query(ctx context.Context, sceneMarkerFilter *SceneMarkerFilterType, findFilter *FindFilterType) ([]*SceneMarker, int, error) - QueryCount(ctx context.Context, sceneMarkerFilter *SceneMarkerFilterType, findFilter *FindFilterType) (int, error) - GetTagIDs(ctx context.Context, 
imageID int) ([]int, error) -} - -type SceneMarkerWriter interface { - Create(ctx context.Context, newSceneMarker *SceneMarker) error - Update(ctx context.Context, updatedSceneMarker *SceneMarker) error - UpdatePartial(ctx context.Context, id int, updatedSceneMarker SceneMarkerPartial) (*SceneMarker, error) - Destroy(ctx context.Context, id int) error - UpdateTags(ctx context.Context, markerID int, tagIDs []int) error -} - -type SceneMarkerReaderWriter interface { - SceneMarkerReader - SceneMarkerWriter -} diff --git a/pkg/models/studio.go b/pkg/models/studio.go index f98173d2a54..2d743db4bb6 100644 --- a/pkg/models/studio.go +++ b/pkg/models/studio.go @@ -1,7 +1,5 @@ package models -import "context" - type StudioFilterType struct { And *StudioFilterType `json:"AND"` Or *StudioFilterType `json:"OR"` @@ -38,38 +36,31 @@ type StudioFilterType struct { UpdatedAt *TimestampCriterionInput `json:"updated_at"` } -type StudioFinder interface { - FindMany(ctx context.Context, ids []int) ([]*Studio, error) -} - -type StudioReader interface { - Find(ctx context.Context, id int) (*Studio, error) - StudioFinder - FindChildren(ctx context.Context, id int) ([]*Studio, error) - FindByName(ctx context.Context, name string, nocase bool) (*Studio, error) - FindByStashID(ctx context.Context, stashID StashID) ([]*Studio, error) - FindByStashIDStatus(ctx context.Context, hasStashID bool, stashboxEndpoint string) ([]*Studio, error) - Count(ctx context.Context) (int, error) - All(ctx context.Context) ([]*Studio, error) - // TODO - this interface is temporary until the filter schema can fully - // support the query needed - QueryForAutoTag(ctx context.Context, words []string) ([]*Studio, error) - Query(ctx context.Context, studioFilter *StudioFilterType, findFilter *FindFilterType) ([]*Studio, int, error) - GetImage(ctx context.Context, studioID int) ([]byte, error) - HasImage(ctx context.Context, studioID int) (bool, error) - AliasLoader - StashIDLoader -} - -type StudioWriter interface { 
- Create(ctx context.Context, newStudio *Studio) error - UpdatePartial(ctx context.Context, input StudioPartial) (*Studio, error) - Update(ctx context.Context, updatedStudio *Studio) error - Destroy(ctx context.Context, id int) error - UpdateImage(ctx context.Context, studioID int, image []byte) error +type StudioCreateInput struct { + Name string `json:"name"` + URL *string `json:"url"` + ParentID *string `json:"parent_id"` + // This should be a URL or a base64 encoded data URL + Image *string `json:"image"` + StashIds []StashID `json:"stash_ids"` + Rating *int `json:"rating"` + Rating100 *int `json:"rating100"` + Details *string `json:"details"` + Aliases []string `json:"aliases"` + IgnoreAutoTag *bool `json:"ignore_auto_tag"` } -type StudioReaderWriter interface { - StudioReader - StudioWriter +type StudioUpdateInput struct { + ID string `json:"id"` + Name *string `json:"name"` + URL *string `json:"url"` + ParentID *string `json:"parent_id"` + // This should be a URL or a base64 encoded data URL + Image *string `json:"image"` + StashIds []StashID `json:"stash_ids"` + Rating *int `json:"rating"` + Rating100 *int `json:"rating100"` + Details *string `json:"details"` + Aliases []string `json:"aliases"` + IgnoreAutoTag *bool `json:"ignore_auto_tag"` } diff --git a/pkg/models/tag.go b/pkg/models/tag.go index 0ddcc1d86cd..b2cff5a0ebc 100644 --- a/pkg/models/tag.go +++ b/pkg/models/tag.go @@ -1,7 +1,5 @@ package models -import "context" - type TagFilterType struct { And *TagFilterType `json:"AND"` Or *TagFilterType `json:"OR"` @@ -39,49 +37,3 @@ type TagFilterType struct { // Filter by updated at UpdatedAt *TimestampCriterionInput `json:"updated_at"` } - -type TagFinder interface { - FindMany(ctx context.Context, ids []int) ([]*Tag, error) -} - -type TagReader interface { - Find(ctx context.Context, id int) (*Tag, error) - TagFinder - FindBySceneID(ctx context.Context, sceneID int) ([]*Tag, error) - FindByPerformerID(ctx context.Context, performerID int) ([]*Tag, 
error) - FindBySceneMarkerID(ctx context.Context, sceneMarkerID int) ([]*Tag, error) - FindByImageID(ctx context.Context, imageID int) ([]*Tag, error) - FindByGalleryID(ctx context.Context, galleryID int) ([]*Tag, error) - FindByName(ctx context.Context, name string, nocase bool) (*Tag, error) - FindByNames(ctx context.Context, names []string, nocase bool) ([]*Tag, error) - FindByParentTagID(ctx context.Context, parentID int) ([]*Tag, error) - FindByChildTagID(ctx context.Context, childID int) ([]*Tag, error) - Count(ctx context.Context) (int, error) - All(ctx context.Context) ([]*Tag, error) - // TODO - this interface is temporary until the filter schema can fully - // support the query needed - QueryForAutoTag(ctx context.Context, words []string) ([]*Tag, error) - Query(ctx context.Context, tagFilter *TagFilterType, findFilter *FindFilterType) ([]*Tag, int, error) - GetImage(ctx context.Context, tagID int) ([]byte, error) - HasImage(ctx context.Context, tagID int) (bool, error) - GetAliases(ctx context.Context, tagID int) ([]string, error) - FindAllAncestors(ctx context.Context, tagID int, excludeIDs []int) ([]*TagPath, error) - FindAllDescendants(ctx context.Context, tagID int, excludeIDs []int) ([]*TagPath, error) -} - -type TagWriter interface { - Create(ctx context.Context, newTag *Tag) error - UpdatePartial(ctx context.Context, id int, updateTag TagPartial) (*Tag, error) - Update(ctx context.Context, updatedTag *Tag) error - Destroy(ctx context.Context, id int) error - UpdateImage(ctx context.Context, tagID int, image []byte) error - UpdateAliases(ctx context.Context, tagID int, aliases []string) error - Merge(ctx context.Context, source []int, destination int) error - UpdateParentTags(ctx context.Context, tagID int, parentIDs []int) error - UpdateChildTags(ctx context.Context, tagID int, parentIDs []int) error -} - -type TagReaderWriter interface { - TagReader - TagWriter -} diff --git a/pkg/movie/export.go b/pkg/movie/export.go index 
09963ce5e87..5a6c49aa364 100644 --- a/pkg/movie/export.go +++ b/pkg/movie/export.go @@ -8,7 +8,6 @@ import ( "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/json" "github.com/stashapp/stash/pkg/models/jsonschema" - "github.com/stashapp/stash/pkg/studio" "github.com/stashapp/stash/pkg/utils" ) @@ -18,7 +17,7 @@ type ImageGetter interface { } // ToJSON converts a Movie into its JSON equivalent. -func ToJSON(ctx context.Context, reader ImageGetter, studioReader studio.Finder, movie *models.Movie) (*jsonschema.Movie, error) { +func ToJSON(ctx context.Context, reader ImageGetter, studioReader models.StudioGetter, movie *models.Movie) (*jsonschema.Movie, error) { newMovieJSON := jsonschema.Movie{ Name: movie.Name, Aliases: movie.Aliases, diff --git a/pkg/movie/import.go b/pkg/movie/import.go index 75e08b0bb1f..8004798ae53 100644 --- a/pkg/movie/import.go +++ b/pkg/movie/import.go @@ -6,24 +6,17 @@ import ( "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/jsonschema" - "github.com/stashapp/stash/pkg/studio" "github.com/stashapp/stash/pkg/utils" ) -type ImageUpdater interface { - UpdateFrontImage(ctx context.Context, movieID int, frontImage []byte) error - UpdateBackImage(ctx context.Context, movieID int, backImage []byte) error -} - -type NameFinderCreatorUpdater interface { - NameFinderCreator - Update(ctx context.Context, updatedMovie *models.Movie) error - ImageUpdater +type ImporterReaderWriter interface { + models.MovieCreatorUpdater + FindByName(ctx context.Context, name string, nocase bool) (*models.Movie, error) } type Importer struct { - ReaderWriter NameFinderCreatorUpdater - StudioWriter studio.NameFinderCreator + ReaderWriter ImporterReaderWriter + StudioWriter models.StudioFinderCreator Input jsonschema.Movie MissingRefBehaviour models.ImportMissingRefEnum @@ -116,11 +109,10 @@ func (i *Importer) populateStudio(ctx context.Context) error { } func (i *Importer) createStudio(ctx context.Context, name 
string) (int, error) { - newStudio := &models.Studio{ - Name: name, - } + newStudio := models.NewStudio() + newStudio.Name = name - err := i.StudioWriter.Create(ctx, newStudio) + err := i.StudioWriter.Create(ctx, &newStudio) if err != nil { return 0, err } diff --git a/pkg/movie/query.go b/pkg/movie/query.go index 3736f943798..3fac932a03d 100644 --- a/pkg/movie/query.go +++ b/pkg/movie/query.go @@ -7,15 +7,7 @@ import ( "github.com/stashapp/stash/pkg/models" ) -type Queryer interface { - Query(ctx context.Context, movieFilter *models.MovieFilterType, findFilter *models.FindFilterType) ([]*models.Movie, int, error) -} - -type CountQueryer interface { - QueryCount(ctx context.Context, movieFilter *models.MovieFilterType, findFilter *models.FindFilterType) (int, error) -} - -func CountByStudioID(ctx context.Context, r CountQueryer, id int, depth *int) (int, error) { +func CountByStudioID(ctx context.Context, r models.MovieQueryer, id int, depth *int) (int, error) { filter := &models.MovieFilterType{ Studios: &models.HierarchicalMultiCriterionInput{ Value: []string{strconv.Itoa(id)}, diff --git a/pkg/movie/update.go b/pkg/movie/update.go deleted file mode 100644 index 4111215e232..00000000000 --- a/pkg/movie/update.go +++ /dev/null @@ -1,12 +0,0 @@ -package movie - -import ( - "context" - - "github.com/stashapp/stash/pkg/models" -) - -type NameFinderCreator interface { - FindByName(ctx context.Context, name string, nocase bool) (*models.Movie, error) - Create(ctx context.Context, newMovie *models.Movie) error -} diff --git a/pkg/performer/import.go b/pkg/performer/import.go index f84030a6ed7..9f57d97fe9a 100644 --- a/pkg/performer/import.go +++ b/pkg/performer/import.go @@ -10,19 +10,17 @@ import ( "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/jsonschema" "github.com/stashapp/stash/pkg/sliceutil/stringslice" - "github.com/stashapp/stash/pkg/tag" "github.com/stashapp/stash/pkg/utils" ) -type NameFinderCreatorUpdater interface { - 
NameFinderCreator - Update(ctx context.Context, updatedPerformer *models.Performer) error - UpdateImage(ctx context.Context, performerID int, image []byte) error +type ImporterReaderWriter interface { + models.PerformerCreatorUpdater + models.PerformerQueryer } type Importer struct { - ReaderWriter NameFinderCreatorUpdater - TagWriter tag.NameFinderCreator + ReaderWriter ImporterReaderWriter + TagWriter models.TagFinderCreator Input jsonschema.Performer MissingRefBehaviour models.ImportMissingRefEnum @@ -65,7 +63,7 @@ func (i *Importer) populateTags(ctx context.Context) error { return nil } -func importTags(ctx context.Context, tagWriter tag.NameFinderCreator, names []string, missingRefBehaviour models.ImportMissingRefEnum) ([]*models.Tag, error) { +func importTags(ctx context.Context, tagWriter models.TagFinderCreator, names []string, missingRefBehaviour models.ImportMissingRefEnum) ([]*models.Tag, error) { tags, err := tagWriter.FindByNames(ctx, names, false) if err != nil { return nil, err @@ -100,17 +98,18 @@ func importTags(ctx context.Context, tagWriter tag.NameFinderCreator, names []st return tags, nil } -func createTags(ctx context.Context, tagWriter tag.NameFinderCreator, names []string) ([]*models.Tag, error) { +func createTags(ctx context.Context, tagWriter models.TagFinderCreator, names []string) ([]*models.Tag, error) { var ret []*models.Tag for _, name := range names { - newTag := models.NewTag(name) + newTag := models.NewTag() + newTag.Name = name - err := tagWriter.Create(ctx, newTag) + err := tagWriter.Create(ctx, &newTag) if err != nil { return nil, err } - ret = append(ret, newTag) + ret = append(ret, &newTag) } return ret, nil diff --git a/pkg/performer/query.go b/pkg/performer/query.go index b8df03a1c51..d85fa514866 100644 --- a/pkg/performer/query.go +++ b/pkg/performer/query.go @@ -7,15 +7,7 @@ import ( "github.com/stashapp/stash/pkg/models" ) -type Queryer interface { - Query(ctx context.Context, performerFilter *models.PerformerFilterType, 
findFilter *models.FindFilterType) ([]*models.Performer, int, error) -} - -type CountQueryer interface { - QueryCount(ctx context.Context, galleryFilter *models.PerformerFilterType, findFilter *models.FindFilterType) (int, error) -} - -func CountByStudioID(ctx context.Context, r CountQueryer, id int, depth *int) (int, error) { +func CountByStudioID(ctx context.Context, r models.PerformerQueryer, id int, depth *int) (int, error) { filter := &models.PerformerFilterType{ Studios: &models.HierarchicalMultiCriterionInput{ Value: []string{strconv.Itoa(id)}, @@ -27,7 +19,7 @@ func CountByStudioID(ctx context.Context, r CountQueryer, id int, depth *int) (i return r.QueryCount(ctx, filter, nil) } -func CountByTagID(ctx context.Context, r CountQueryer, id int, depth *int) (int, error) { +func CountByTagID(ctx context.Context, r models.PerformerQueryer, id int, depth *int) (int, error) { filter := &models.PerformerFilterType{ Tags: &models.HierarchicalMultiCriterionInput{ Value: []string{strconv.Itoa(id)}, @@ -39,7 +31,7 @@ func CountByTagID(ctx context.Context, r CountQueryer, id int, depth *int) (int, return r.QueryCount(ctx, filter, nil) } -func CountByAppearsWith(ctx context.Context, r CountQueryer, id int) (int, error) { +func CountByAppearsWith(ctx context.Context, r models.PerformerQueryer, id int) (int, error) { filter := &models.PerformerFilterType{ Performers: &models.MultiCriterionInput{ Value: []string{strconv.Itoa(id)}, diff --git a/pkg/performer/update.go b/pkg/performer/update.go deleted file mode 100644 index d846eb6ce93..00000000000 --- a/pkg/performer/update.go +++ /dev/null @@ -1,13 +0,0 @@ -package performer - -import ( - "context" - - "github.com/stashapp/stash/pkg/models" -) - -type NameFinderCreator interface { - FindByNames(ctx context.Context, names []string, nocase bool) ([]*models.Performer, error) - Query(ctx context.Context, performerFilter *models.PerformerFilterType, findFilter *models.FindFilterType) ([]*models.Performer, int, error) - 
Create(ctx context.Context, newPerformer *models.Performer) error -} diff --git a/pkg/scene/create.go b/pkg/scene/create.go index c2345d2ef95..428c636a771 100644 --- a/pkg/scene/create.go +++ b/pkg/scene/create.go @@ -6,12 +6,11 @@ import ( "fmt" "time" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/plugin" ) -func (s *Service) Create(ctx context.Context, input *models.Scene, fileIDs []file.ID, coverImage []byte) (*models.Scene, error) { +func (s *Service) Create(ctx context.Context, input *models.Scene, fileIDs []models.FileID, coverImage []byte) (*models.Scene, error) { // title must be set if no files are provided if input.Title == "" && len(fileIDs) == 0 { return nil, errors.New("title must be set if scene has no files") diff --git a/pkg/scene/delete.go b/pkg/scene/delete.go index c7e8fdcc4be..7426c390b4b 100644 --- a/pkg/scene/delete.go +++ b/pkg/scene/delete.go @@ -105,15 +105,6 @@ func (d *FileDeleter) MarkMarkerFiles(scene *models.Scene, seconds int) error { return d.Files(files) } -type Destroyer interface { - Destroy(ctx context.Context, id int) error -} - -type MarkerDestroyer interface { - FindBySceneID(ctx context.Context, sceneID int) ([]*models.SceneMarker, error) - Destroy(ctx context.Context, id int) error -} - // Destroy deletes a scene and its associated relationships from the // database. func (s *Service) Destroy(ctx context.Context, scene *models.Scene, fileDeleter *FileDeleter, deleteGenerated, deleteFile bool) error { @@ -190,7 +181,7 @@ func (s *Service) deleteFiles(ctx context.Context, scene *models.Scene, fileDele // DestroyMarker deletes the scene marker from the database and returns a // function that removes the generated files, to be executed after the // transaction is successfully committed. 
-func DestroyMarker(ctx context.Context, scene *models.Scene, sceneMarker *models.SceneMarker, qb MarkerDestroyer, fileDeleter *FileDeleter) error { +func DestroyMarker(ctx context.Context, scene *models.Scene, sceneMarker *models.SceneMarker, qb models.SceneMarkerDestroyer, fileDeleter *FileDeleter) error { if err := qb.Destroy(ctx, sceneMarker.ID); err != nil { return err } diff --git a/pkg/scene/export.go b/pkg/scene/export.go index 5fa3b8b2df5..90419e2c4d7 100644 --- a/pkg/scene/export.go +++ b/pkg/scene/export.go @@ -11,8 +11,6 @@ import ( "github.com/stashapp/stash/pkg/models/json" "github.com/stashapp/stash/pkg/models/jsonschema" "github.com/stashapp/stash/pkg/sliceutil/intslice" - "github.com/stashapp/stash/pkg/studio" - "github.com/stashapp/stash/pkg/tag" "github.com/stashapp/stash/pkg/utils" ) @@ -20,18 +18,10 @@ type CoverGetter interface { GetCover(ctx context.Context, sceneID int) ([]byte, error) } -type MarkerTagFinder interface { - tag.Finder - TagFinder - FindBySceneMarkerID(ctx context.Context, sceneMarkerID int) ([]*models.Tag, error) -} - -type MarkerFinder interface { - FindBySceneID(ctx context.Context, sceneID int) ([]*models.SceneMarker, error) -} - type TagFinder interface { + models.TagGetter FindBySceneID(ctx context.Context, sceneID int) ([]*models.Tag, error) + FindBySceneMarkerID(ctx context.Context, sceneMarkerID int) ([]*models.Tag, error) } // ToBasicJSON converts a scene object into its JSON object equivalent. It @@ -88,7 +78,7 @@ func ToBasicJSON(ctx context.Context, reader CoverGetter, scene *models.Scene) ( // GetStudioName returns the name of the provided scene's studio. It returns an // empty string if there is no studio assigned to the scene. 
-func GetStudioName(ctx context.Context, reader studio.Finder, scene *models.Scene) (string, error) { +func GetStudioName(ctx context.Context, reader models.StudioGetter, scene *models.Scene) (string, error) { if scene.StudioID != nil { studio, err := reader.Find(ctx, *scene.StudioID) if err != nil { @@ -126,7 +116,7 @@ func getTagNames(tags []*models.Tag) []string { } // GetDependentTagIDs returns a slice of unique tag IDs that this scene references. -func GetDependentTagIDs(ctx context.Context, tags MarkerTagFinder, markerReader MarkerFinder, scene *models.Scene) ([]int, error) { +func GetDependentTagIDs(ctx context.Context, tags TagFinder, markerReader models.SceneMarkerFinder, scene *models.Scene) ([]int, error) { var ret []int t, err := tags.FindBySceneID(ctx, scene.ID) @@ -158,13 +148,9 @@ func GetDependentTagIDs(ctx context.Context, tags MarkerTagFinder, markerReader return ret, nil } -type MovieFinder interface { - Find(ctx context.Context, id int) (*models.Movie, error) -} - // GetSceneMoviesJSON returns a slice of SceneMovie JSON representation objects // corresponding to the provided scene's scene movie relationships. -func GetSceneMoviesJSON(ctx context.Context, movieReader MovieFinder, scene *models.Scene) ([]jsonschema.SceneMovie, error) { +func GetSceneMoviesJSON(ctx context.Context, movieReader models.MovieGetter, scene *models.Scene) ([]jsonschema.SceneMovie, error) { sceneMovies := scene.Movies.List() var results []jsonschema.SceneMovie @@ -202,7 +188,7 @@ func GetDependentMovieIDs(ctx context.Context, scene *models.Scene) ([]int, erro // GetSceneMarkersJSON returns a slice of SceneMarker JSON representation // objects corresponding to the provided scene's markers. 
-func GetSceneMarkersJSON(ctx context.Context, markerReader MarkerFinder, tagReader MarkerTagFinder, scene *models.Scene) ([]jsonschema.SceneMarker, error) { +func GetSceneMarkersJSON(ctx context.Context, markerReader models.SceneMarkerFinder, tagReader TagFinder, scene *models.Scene) ([]jsonschema.SceneMarker, error) { sceneMarkers, err := markerReader.FindBySceneID(ctx, scene.ID) if err != nil { return nil, fmt.Errorf("error getting scene markers: %v", err) diff --git a/pkg/scene/export_test.go b/pkg/scene/export_test.go index 85a63aa5518..19e12ecea70 100644 --- a/pkg/scene/export_test.go +++ b/pkg/scene/export_test.go @@ -3,7 +3,6 @@ package scene import ( "errors" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/json" "github.com/stashapp/stash/pkg/models/jsonschema" @@ -93,9 +92,9 @@ func createFullScene(id int) models.Scene { Rating: &rating, Organized: organized, URLs: models.NewRelatedStrings([]string{url}), - Files: models.NewRelatedVideoFiles([]*file.VideoFile{ + Files: models.NewRelatedVideoFiles([]*models.VideoFile{ { - BaseFile: &file.BaseFile{ + BaseFile: &models.BaseFile{ Path: path, }, }, @@ -111,9 +110,9 @@ func createFullScene(id int) models.Scene { func createEmptyScene(id int) models.Scene { return models.Scene{ ID: id, - Files: models.NewRelatedVideoFiles([]*file.VideoFile{ + Files: models.NewRelatedVideoFiles([]*models.VideoFile{ { - BaseFile: &file.BaseFile{ + BaseFile: &models.BaseFile{ Path: path, }, }, diff --git a/pkg/scene/filename_parser.go b/pkg/scene/filename_parser.go index 3dfab35384b..b7c38863e54 100644 --- a/pkg/scene/filename_parser.go +++ b/pkg/scene/filename_parser.go @@ -450,11 +450,11 @@ func (p *FilenameParser) initWhiteSpaceRegex() { } type FilenameParserRepository struct { - Scene Queryer + Scene models.SceneQueryer Performer PerformerNamesFinder - Studio studio.Queryer + Studio models.StudioQueryer Movie MovieNameFinder - Tag tag.Queryer + Tag 
models.TagQueryer } func (p *FilenameParser) Parse(ctx context.Context, repo FilenameParserRepository) ([]*models.SceneParserResult, int, error) { @@ -544,7 +544,7 @@ func (p *FilenameParser) queryPerformer(ctx context.Context, qb PerformerNamesFi return ret } -func (p *FilenameParser) queryStudio(ctx context.Context, qb studio.Queryer, studioName string) *models.Studio { +func (p *FilenameParser) queryStudio(ctx context.Context, qb models.StudioQueryer, studioName string) *models.Studio { // massage the performer name studioName = delimiterRE.ReplaceAllString(studioName, " ") @@ -587,7 +587,7 @@ func (p *FilenameParser) queryMovie(ctx context.Context, qb MovieNameFinder, mov return ret } -func (p *FilenameParser) queryTag(ctx context.Context, qb tag.Queryer, tagName string) *models.Tag { +func (p *FilenameParser) queryTag(ctx context.Context, qb models.TagQueryer, tagName string) *models.Tag { // massage the tag name tagName = delimiterRE.ReplaceAllString(tagName, " ") @@ -626,7 +626,7 @@ func (p *FilenameParser) setPerformers(ctx context.Context, qb PerformerNamesFin } } -func (p *FilenameParser) setTags(ctx context.Context, qb tag.Queryer, h sceneHolder, result *models.SceneParserResult) { +func (p *FilenameParser) setTags(ctx context.Context, qb models.TagQueryer, h sceneHolder, result *models.SceneParserResult) { // query for each performer tagsSet := make(map[int]bool) for _, tagName := range h.tags { @@ -642,7 +642,7 @@ func (p *FilenameParser) setTags(ctx context.Context, qb tag.Queryer, h sceneHol } } -func (p *FilenameParser) setStudio(ctx context.Context, qb studio.Queryer, h sceneHolder, result *models.SceneParserResult) { +func (p *FilenameParser) setStudio(ctx context.Context, qb models.StudioQueryer, h sceneHolder, result *models.SceneParserResult) { // query for each performer if h.studio != "" { studio := p.queryStudio(ctx, qb, h.studio) diff --git a/pkg/scene/generate/generator.go b/pkg/scene/generate/generator.go index 49568fb2aed..70f6857ea5d 
100644 --- a/pkg/scene/generate/generator.go +++ b/pkg/scene/generate/generator.go @@ -97,7 +97,7 @@ func (g Generator) generateFile(lockCtx *fsutil.LockContext, p Paths, pattern st } if err := fsutil.SafeMove(tmpFn, output); err != nil { - return fmt.Errorf("moving %s to %s", tmpFn, output) + return fmt.Errorf("moving %s to %s failed: %w", tmpFn, output, err) } return nil diff --git a/pkg/scene/hash.go b/pkg/scene/hash.go index 4b06a73ef9c..efa9c0fd3f1 100644 --- a/pkg/scene/hash.go +++ b/pkg/scene/hash.go @@ -1,18 +1,17 @@ package scene import ( - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" ) // GetHash returns the hash of the file, based on the hash algorithm provided. If // hash algorithm is MD5, then Checksum is returned. Otherwise, OSHash is returned. -func GetHash(f file.File, hashAlgorithm models.HashAlgorithm) string { +func GetHash(f models.File, hashAlgorithm models.HashAlgorithm) string { switch hashAlgorithm { case models.HashAlgorithmMd5: - return f.Base().Fingerprints.GetString(file.FingerprintTypeMD5) + return f.Base().Fingerprints.GetString(models.FingerprintTypeMD5) case models.HashAlgorithmOshash: - return f.Base().Fingerprints.GetString(file.FingerprintTypeOshash) + return f.Base().Fingerprints.GetString(models.FingerprintTypeOshash) default: panic("unknown hash algorithm") } diff --git a/pkg/scene/import.go b/pkg/scene/import.go index 2d73c0f2cb0..8c67cecdf39 100644 --- a/pkg/scene/import.go +++ b/pkg/scene/import.go @@ -5,32 +5,25 @@ import ( "fmt" "strings" - "github.com/stashapp/stash/pkg/file" - "github.com/stashapp/stash/pkg/gallery" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/jsonschema" - "github.com/stashapp/stash/pkg/movie" - "github.com/stashapp/stash/pkg/performer" "github.com/stashapp/stash/pkg/sliceutil/stringslice" - "github.com/stashapp/stash/pkg/studio" - "github.com/stashapp/stash/pkg/tag" "github.com/stashapp/stash/pkg/utils" ) -type FullCreatorUpdater interface 
{ - CreatorUpdater - Update(ctx context.Context, updatedScene *models.Scene) error - Updater +type ImporterReaderWriter interface { + models.SceneCreatorUpdater + FindByFileID(ctx context.Context, fileID models.FileID) ([]*models.Scene, error) } type Importer struct { - ReaderWriter FullCreatorUpdater - FileFinder file.Getter - StudioWriter studio.NameFinderCreator - GalleryFinder gallery.Finder - PerformerWriter performer.NameFinderCreator - MovieWriter movie.NameFinderCreator - TagWriter tag.NameFinderCreator + ReaderWriter ImporterReaderWriter + FileFinder models.FileFinder + StudioWriter models.StudioFinderCreator + GalleryFinder models.GalleryFinder + PerformerWriter models.PerformerFinderCreator + MovieWriter models.MovieFinderCreator + TagWriter models.TagFinderCreator Input jsonschema.Scene MissingRefBehaviour models.ImportMissingRefEnum FileNamingAlgorithm models.HashAlgorithm @@ -123,7 +116,7 @@ func (i *Importer) sceneJSONToScene(sceneJSON jsonschema.Scene) models.Scene { } func (i *Importer) populateFiles(ctx context.Context) error { - files := make([]*file.VideoFile, 0) + files := make([]*models.VideoFile, 0) for _, ref := range i.Input.Files { path := ref @@ -135,7 +128,7 @@ func (i *Importer) populateFiles(ctx context.Context) error { if f == nil { return fmt.Errorf("scene file '%s' not found", path) } else { - files = append(files, f.(*file.VideoFile)) + files = append(files, f.(*models.VideoFile)) } } @@ -176,11 +169,10 @@ func (i *Importer) populateStudio(ctx context.Context) error { } func (i *Importer) createStudio(ctx context.Context, name string) (int, error) { - newStudio := &models.Studio{ - Name: name, - } + newStudio := models.NewStudio() + newStudio.Name = name - err := i.StudioWriter.Create(ctx, newStudio) + err := i.StudioWriter.Create(ctx, &newStudio) if err != nil { return 0, err } @@ -286,7 +278,8 @@ func (i *Importer) populatePerformers(ctx context.Context) error { func (i *Importer) createPerformers(ctx context.Context, names 
[]string) ([]*models.Performer, error) { var ret []*models.Performer for _, name := range names { - newPerformer := *models.NewPerformer(name) + newPerformer := models.NewPerformer() + newPerformer.Name = name err := i.PerformerWriter.Create(ctx, &newPerformer) if err != nil { @@ -345,9 +338,10 @@ func (i *Importer) populateMovies(ctx context.Context) error { } func (i *Importer) createMovie(ctx context.Context, name string) (int, error) { - newMovie := models.NewMovie(name) + newMovie := models.NewMovie() + newMovie.Name = name - err := i.MovieWriter.Create(ctx, newMovie) + err := i.MovieWriter.Create(ctx, &newMovie) if err != nil { return 0, err } @@ -413,7 +407,7 @@ func (i *Importer) FindExistingID(ctx context.Context) (*int, error) { } func (i *Importer) Create(ctx context.Context) (*int, error) { - var fileIDs []file.ID + var fileIDs []models.FileID for _, f := range i.scene.Files.List() { fileIDs = append(fileIDs, f.Base().ID) } @@ -437,7 +431,7 @@ func (i *Importer) Update(ctx context.Context, id int) error { return nil } -func importTags(ctx context.Context, tagWriter tag.NameFinderCreator, names []string, missingRefBehaviour models.ImportMissingRefEnum) ([]*models.Tag, error) { +func importTags(ctx context.Context, tagWriter models.TagFinderCreator, names []string, missingRefBehaviour models.ImportMissingRefEnum) ([]*models.Tag, error) { tags, err := tagWriter.FindByNames(ctx, names, false) if err != nil { return nil, err @@ -472,17 +466,18 @@ func importTags(ctx context.Context, tagWriter tag.NameFinderCreator, names []st return tags, nil } -func createTags(ctx context.Context, tagWriter tag.NameFinderCreator, names []string) ([]*models.Tag, error) { +func createTags(ctx context.Context, tagWriter models.TagCreator, names []string) ([]*models.Tag, error) { var ret []*models.Tag for _, name := range names { - newTag := models.NewTag(name) + newTag := models.NewTag() + newTag.Name = name - err := tagWriter.Create(ctx, newTag) + err := tagWriter.Create(ctx, 
&newTag) if err != nil { return nil, err } - ret = append(ret, newTag) + ret = append(ret, &newTag) } return ret, nil diff --git a/pkg/scene/marker_import.go b/pkg/scene/marker_import.go index 20127cbf8db..33937af7e10 100644 --- a/pkg/scene/marker_import.go +++ b/pkg/scene/marker_import.go @@ -7,20 +7,17 @@ import ( "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/jsonschema" - "github.com/stashapp/stash/pkg/tag" ) type MarkerCreatorUpdater interface { - Create(ctx context.Context, newSceneMarker *models.SceneMarker) error - Update(ctx context.Context, updatedSceneMarker *models.SceneMarker) error + models.SceneMarkerCreatorUpdater FindBySceneID(ctx context.Context, sceneID int) ([]*models.SceneMarker, error) - UpdateTags(ctx context.Context, markerID int, tagIDs []int) error } type MarkerImporter struct { SceneID int ReaderWriter MarkerCreatorUpdater - TagWriter tag.NameFinderCreator + TagWriter models.TagFinderCreator Input jsonschema.SceneMarker MissingRefBehaviour models.ImportMissingRefEnum diff --git a/pkg/scene/marker_query.go b/pkg/scene/marker_query.go index e4ae5b6dfae..d9cd311a78b 100644 --- a/pkg/scene/marker_query.go +++ b/pkg/scene/marker_query.go @@ -7,15 +7,7 @@ import ( "github.com/stashapp/stash/pkg/models" ) -type MarkerQueryer interface { - Query(ctx context.Context, sceneMarkerFilter *models.SceneMarkerFilterType, findFilter *models.FindFilterType) ([]*models.SceneMarker, int, error) -} - -type MarkerCountQueryer interface { - QueryCount(ctx context.Context, sceneMarkerFilter *models.SceneMarkerFilterType, findFilter *models.FindFilterType) (int, error) -} - -func MarkerCountByTagID(ctx context.Context, r MarkerCountQueryer, id int, depth *int) (int, error) { +func MarkerCountByTagID(ctx context.Context, r models.SceneMarkerQueryer, id int, depth *int) (int, error) { filter := &models.SceneMarkerFilterType{ Tags: &models.HierarchicalMultiCriterionInput{ Value: []string{strconv.Itoa(id)}, diff --git 
a/pkg/scene/merge.go b/pkg/scene/merge.go index ed660d83e2b..8934f5515a8 100644 --- a/pkg/scene/merge.go +++ b/pkg/scene/merge.go @@ -6,7 +6,6 @@ import ( "fmt" "os" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" @@ -33,7 +32,7 @@ func (s *Service) Merge(ctx context.Context, sourceIDs []int, destinationID int, return fmt.Errorf("finding source scenes: %w", err) } - var fileIDs []file.ID + var fileIDs []models.FileID for _, src := range sources { // TODO - delete generated files as needed diff --git a/pkg/scene/migrate_screenshots.go b/pkg/scene/migrate_screenshots.go index 94d73643f07..59eade29957 100644 --- a/pkg/scene/migrate_screenshots.go +++ b/pkg/scene/migrate_screenshots.go @@ -20,7 +20,8 @@ type MigrateSceneScreenshotsInput struct { type HashFinderCoverUpdater interface { FindByChecksum(ctx context.Context, checksum string) ([]*models.Scene, error) FindByOSHash(ctx context.Context, oshash string) ([]*models.Scene, error) - CoverUpdater + HasCover(ctx context.Context, sceneID int) (bool, error) + UpdateCover(ctx context.Context, sceneID int, cover []byte) error } type ScreenshotMigrator struct { diff --git a/pkg/scene/query.go b/pkg/scene/query.go index 3dc7524ed90..a8b1993a6a0 100644 --- a/pkg/scene/query.go +++ b/pkg/scene/query.go @@ -11,19 +11,6 @@ import ( "github.com/stashapp/stash/pkg/models" ) -type Queryer interface { - Query(ctx context.Context, options models.SceneQueryOptions) (*models.SceneQueryResult, error) -} - -type CountQueryer interface { - QueryCount(ctx context.Context, sceneFilter *models.SceneFilterType, findFilter *models.FindFilterType) (int, error) -} - -type IDFinder interface { - Find(ctx context.Context, id int) (*models.Scene, error) - FindMany(ctx context.Context, ids []int) ([]*models.Scene, error) -} - // QueryOptions returns a SceneQueryOptions populated with the provided filters. 
func QueryOptions(sceneFilter *models.SceneFilterType, findFilter *models.FindFilterType, count bool) models.SceneQueryOptions { return models.SceneQueryOptions{ @@ -36,7 +23,7 @@ func QueryOptions(sceneFilter *models.SceneFilterType, findFilter *models.FindFi } // QueryWithCount queries for scenes, returning the scene objects and the total count. -func QueryWithCount(ctx context.Context, qb Queryer, sceneFilter *models.SceneFilterType, findFilter *models.FindFilterType) ([]*models.Scene, int, error) { +func QueryWithCount(ctx context.Context, qb models.SceneQueryer, sceneFilter *models.SceneFilterType, findFilter *models.FindFilterType) ([]*models.Scene, int, error) { // this was moved from the queryBuilder code // left here so that calling functions can reference this instead result, err := qb.Query(ctx, QueryOptions(sceneFilter, findFilter, true)) @@ -53,7 +40,7 @@ func QueryWithCount(ctx context.Context, qb Queryer, sceneFilter *models.SceneFi } // Query queries for scenes using the provided filters. 
-func Query(ctx context.Context, qb Queryer, sceneFilter *models.SceneFilterType, findFilter *models.FindFilterType) ([]*models.Scene, error) { +func Query(ctx context.Context, qb models.SceneQueryer, sceneFilter *models.SceneFilterType, findFilter *models.FindFilterType) ([]*models.Scene, error) { result, err := qb.Query(ctx, QueryOptions(sceneFilter, findFilter, false)) if err != nil { return nil, err @@ -67,7 +54,7 @@ func Query(ctx context.Context, qb Queryer, sceneFilter *models.SceneFilterType, return scenes, nil } -func BatchProcess(ctx context.Context, reader Queryer, sceneFilter *models.SceneFilterType, findFilter *models.FindFilterType, fn func(scene *models.Scene) error) error { +func BatchProcess(ctx context.Context, reader models.SceneQueryer, sceneFilter *models.SceneFilterType, findFilter *models.FindFilterType, fn func(scene *models.Scene) error) error { const batchSize = 1000 if findFilter == nil { @@ -134,7 +121,7 @@ func FilterFromPaths(paths []string) *models.SceneFilterType { return ret } -func CountByStudioID(ctx context.Context, r CountQueryer, id int, depth *int) (int, error) { +func CountByStudioID(ctx context.Context, r models.SceneQueryer, id int, depth *int) (int, error) { filter := &models.SceneFilterType{ Studios: &models.HierarchicalMultiCriterionInput{ Value: []string{strconv.Itoa(id)}, @@ -146,7 +133,7 @@ func CountByStudioID(ctx context.Context, r CountQueryer, id int, depth *int) (i return r.QueryCount(ctx, filter, nil) } -func CountByTagID(ctx context.Context, r CountQueryer, id int, depth *int) (int, error) { +func CountByTagID(ctx context.Context, r models.SceneQueryer, id int, depth *int) (int, error) { filter := &models.SceneFilterType{ Tags: &models.HierarchicalMultiCriterionInput{ Value: []string{strconv.Itoa(id)}, diff --git a/pkg/scene/scan.go b/pkg/scene/scan.go index 5ccdee25601..821485eb9a6 100644 --- a/pkg/scene/scan.go +++ b/pkg/scene/scan.go @@ -4,9 +4,7 @@ import ( "context" "errors" "fmt" - "time" - 
"github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/file/video" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" @@ -19,21 +17,22 @@ var ( ErrNotVideoFile = errors.New("not a video file") ) -type CreatorUpdater interface { - FindByFileID(ctx context.Context, fileID file.ID) ([]*models.Scene, error) - FindByFingerprints(ctx context.Context, fp []file.Fingerprint) ([]*models.Scene, error) - Creator +type ScanCreatorUpdater interface { + FindByFileID(ctx context.Context, fileID models.FileID) ([]*models.Scene, error) + FindByFingerprints(ctx context.Context, fp []models.Fingerprint) ([]*models.Scene, error) + GetFiles(ctx context.Context, relatedID int) ([]*models.VideoFile, error) + + Create(ctx context.Context, newScene *models.Scene, fileIDs []models.FileID) error UpdatePartial(ctx context.Context, id int, updatedScene models.ScenePartial) (*models.Scene, error) - AddFileID(ctx context.Context, id int, fileID file.ID) error - models.VideoFileLoader + AddFileID(ctx context.Context, id int, fileID models.FileID) error } type ScanGenerator interface { - Generate(ctx context.Context, s *models.Scene, f *file.VideoFile) error + Generate(ctx context.Context, s *models.Scene, f *models.VideoFile) error } type ScanHandler struct { - CreatorUpdater CreatorUpdater + CreatorUpdater ScanCreatorUpdater ScanGenerator ScanGenerator CaptionUpdater video.CaptionUpdater @@ -63,12 +62,12 @@ func (h *ScanHandler) validate() error { return nil } -func (h *ScanHandler) Handle(ctx context.Context, f file.File, oldFile file.File) error { +func (h *ScanHandler) Handle(ctx context.Context, f models.File, oldFile models.File) error { if err := h.validate(); err != nil { return err } - videoFile, ok := f.(*file.VideoFile) + videoFile, ok := f.(*models.VideoFile) if !ok { return ErrNotVideoFile } @@ -100,21 +99,17 @@ func (h *ScanHandler) Handle(ctx context.Context, f file.File, oldFile file.File } } else { // create a new scene - now := time.Now() 
- newScene := &models.Scene{ - CreatedAt: now, - UpdatedAt: now, - } + newScene := models.NewScene() logger.Infof("%s doesn't exist. Creating new scene...", f.Base().Path) - if err := h.CreatorUpdater.Create(ctx, newScene, []file.ID{videoFile.ID}); err != nil { + if err := h.CreatorUpdater.Create(ctx, &newScene, []models.FileID{videoFile.ID}); err != nil { return fmt.Errorf("creating new scene: %w", err) } h.PluginCache.RegisterPostHooks(ctx, newScene.ID, plugin.SceneCreatePost, nil, nil) - existing = []*models.Scene{newScene} + existing = []*models.Scene{&newScene} } if oldFile != nil { @@ -140,7 +135,7 @@ func (h *ScanHandler) Handle(ctx context.Context, f file.File, oldFile file.File return nil } -func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models.Scene, f *file.VideoFile, updateExisting bool) error { +func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models.Scene, f *models.VideoFile, updateExisting bool) error { for _, s := range existing { if err := s.LoadFiles(ctx, h.CreatorUpdater); err != nil { return err @@ -162,7 +157,8 @@ func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models. 
} // update updated_at time - if _, err := h.CreatorUpdater.UpdatePartial(ctx, s.ID, models.NewScenePartial()); err != nil { + scenePartial := models.NewScenePartial() + if _, err := h.CreatorUpdater.UpdatePartial(ctx, s.ID, scenePartial); err != nil { return fmt.Errorf("updating scene: %w", err) } } diff --git a/pkg/scene/service.go b/pkg/scene/service.go index f7b51ce1e95..05fa9f532eb 100644 --- a/pkg/scene/service.go +++ b/pkg/scene/service.go @@ -1,58 +1,19 @@ package scene import ( - "context" - - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/paths" "github.com/stashapp/stash/pkg/plugin" ) -type FinderByFile interface { - FindByFileID(ctx context.Context, fileID file.ID) ([]*models.Scene, error) -} - -type FileAssigner interface { - AssignFiles(ctx context.Context, sceneID int, fileID []file.ID) error -} - -type Creator interface { - Create(ctx context.Context, newScene *models.Scene, fileIDs []file.ID) error -} - -type CoverUpdater interface { - HasCover(ctx context.Context, sceneID int) (bool, error) - UpdateCover(ctx context.Context, sceneID int, cover []byte) error -} - type Config interface { GetVideoFileNamingAlgorithm() models.HashAlgorithm } -type Repository interface { - IDFinder - FinderByFile - Creator - PartialUpdater - Destroyer - models.VideoFileLoader - FileAssigner - CoverUpdater - models.SceneReader -} - -type MarkerRepository interface { - MarkerFinder - MarkerDestroyer - - Update(ctx context.Context, updatedObject *models.SceneMarker) error -} - type Service struct { - File file.Store - Repository Repository - MarkerRepository MarkerRepository + File models.FileReaderWriter + Repository models.SceneReaderWriter + MarkerRepository models.SceneMarkerReaderWriter PluginCache *plugin.Cache Paths *paths.Paths diff --git a/pkg/scene/update.go b/pkg/scene/update.go index e3f3e252bde..629fdedad47 100644 --- a/pkg/scene/update.go +++ b/pkg/scene/update.go @@ -6,20 +6,10 @@ import ( 
"fmt" "time" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/utils" ) -type Updater interface { - PartialUpdater - UpdateCover(ctx context.Context, sceneID int, cover []byte) error -} - -type PartialUpdater interface { - UpdatePartial(ctx context.Context, id int, updatedScene models.ScenePartial) (*models.Scene, error) -} - var ErrEmptyUpdater = errors.New("no fields have been set") // UpdateSet is used to update a scene and its relationships. @@ -46,7 +36,7 @@ func (u *UpdateSet) IsEmpty() bool { // Update updates a scene by updating the fields in the Partial field, then // updates non-nil relationships. Returns an error if there is no work to // be done. -func (u *UpdateSet) Update(ctx context.Context, qb Updater) (*models.Scene, error) { +func (u *UpdateSet) Update(ctx context.Context, qb models.SceneUpdater) (*models.Scene, error) { if u.IsEmpty() { return nil, ErrEmptyUpdater } @@ -83,37 +73,37 @@ func (u UpdateSet) UpdateInput() models.SceneUpdateInput { return ret } -func AddPerformer(ctx context.Context, qb PartialUpdater, o *models.Scene, performerID int) error { - _, err := qb.UpdatePartial(ctx, o.ID, models.ScenePartial{ - PerformerIDs: &models.UpdateIDs{ - IDs: []int{performerID}, - Mode: models.RelationshipUpdateModeAdd, - }, - }) +func AddPerformer(ctx context.Context, qb models.SceneUpdater, o *models.Scene, performerID int) error { + scenePartial := models.NewScenePartial() + scenePartial.PerformerIDs = &models.UpdateIDs{ + IDs: []int{performerID}, + Mode: models.RelationshipUpdateModeAdd, + } + _, err := qb.UpdatePartial(ctx, o.ID, scenePartial) return err } -func AddTag(ctx context.Context, qb PartialUpdater, o *models.Scene, tagID int) error { - _, err := qb.UpdatePartial(ctx, o.ID, models.ScenePartial{ - TagIDs: &models.UpdateIDs{ - IDs: []int{tagID}, - Mode: models.RelationshipUpdateModeAdd, - }, - }) +func AddTag(ctx context.Context, qb models.SceneUpdater, o *models.Scene, tagID 
int) error { + scenePartial := models.NewScenePartial() + scenePartial.TagIDs = &models.UpdateIDs{ + IDs: []int{tagID}, + Mode: models.RelationshipUpdateModeAdd, + } + _, err := qb.UpdatePartial(ctx, o.ID, scenePartial) return err } -func AddGallery(ctx context.Context, qb PartialUpdater, o *models.Scene, galleryID int) error { - _, err := qb.UpdatePartial(ctx, o.ID, models.ScenePartial{ - TagIDs: &models.UpdateIDs{ - IDs: []int{galleryID}, - Mode: models.RelationshipUpdateModeAdd, - }, - }) +func AddGallery(ctx context.Context, qb models.SceneUpdater, o *models.Scene, galleryID int) error { + scenePartial := models.NewScenePartial() + scenePartial.TagIDs = &models.UpdateIDs{ + IDs: []int{galleryID}, + Mode: models.RelationshipUpdateModeAdd, + } + _, err := qb.UpdatePartial(ctx, o.ID, scenePartial) return err } -func (s *Service) AssignFile(ctx context.Context, sceneID int, fileID file.ID) error { +func (s *Service) AssignFile(ctx context.Context, sceneID int, fileID models.FileID) error { // ensure file isn't a primary file and that it is a video file f, err := s.File.Find(ctx, fileID) if err != nil { @@ -121,7 +111,7 @@ func (s *Service) AssignFile(ctx context.Context, sceneID int, fileID file.ID) e } ff := f[0] - if _, ok := ff.(*file.VideoFile); !ok { + if _, ok := ff.(*models.VideoFile); !ok { return fmt.Errorf("%s is not a video file", ff.Base().Path) } @@ -134,5 +124,5 @@ func (s *Service) AssignFile(ctx context.Context, sceneID int, fileID file.ID) e return errors.New("cannot reassign primary file") } - return s.Repository.AssignFiles(ctx, sceneID, []file.ID{fileID}) + return s.Repository.AssignFiles(ctx, sceneID, []models.FileID{fileID}) } diff --git a/pkg/scraper/autotag.go b/pkg/scraper/autotag.go index 6ba8b371d5c..5eb3922a804 100644 --- a/pkg/scraper/autotag.go +++ b/pkg/scraper/autotag.go @@ -20,14 +20,14 @@ const ( type autotagScraper struct { // repository models.Repository txnManager txn.Manager - performerReader match.PerformerAutoTagQueryer - 
studioReader match.StudioAutoTagQueryer - tagReader match.TagAutoTagQueryer + performerReader models.PerformerAutoTagQueryer + studioReader models.StudioAutoTagQueryer + tagReader models.TagAutoTagQueryer globalConfig GlobalConfig } -func autotagMatchPerformers(ctx context.Context, path string, performerReader match.PerformerAutoTagQueryer, trimExt bool) ([]*models.ScrapedPerformer, error) { +func autotagMatchPerformers(ctx context.Context, path string, performerReader models.PerformerAutoTagQueryer, trimExt bool) ([]*models.ScrapedPerformer, error) { p, err := match.PathToPerformers(ctx, path, performerReader, nil, trimExt) if err != nil { return nil, fmt.Errorf("error matching performers: %w", err) @@ -52,7 +52,7 @@ func autotagMatchPerformers(ctx context.Context, path string, performerReader ma return ret, nil } -func autotagMatchStudio(ctx context.Context, path string, studioReader match.StudioAutoTagQueryer, trimExt bool) (*models.ScrapedStudio, error) { +func autotagMatchStudio(ctx context.Context, path string, studioReader models.StudioAutoTagQueryer, trimExt bool) (*models.ScrapedStudio, error) { studio, err := match.PathToStudio(ctx, path, studioReader, nil, trimExt) if err != nil { return nil, fmt.Errorf("error matching studios: %w", err) @@ -69,7 +69,7 @@ func autotagMatchStudio(ctx context.Context, path string, studioReader match.Stu return nil, nil } -func autotagMatchTags(ctx context.Context, path string, tagReader match.TagAutoTagQueryer, trimExt bool) ([]*models.ScrapedTag, error) { +func autotagMatchTags(ctx context.Context, path string, tagReader models.TagAutoTagQueryer, trimExt bool) ([]*models.ScrapedTag, error) { t, err := match.PathToTags(ctx, path, tagReader, nil, trimExt) if err != nil { return nil, fmt.Errorf("error matching tags: %w", err) diff --git a/pkg/scraper/cache.go b/pkg/scraper/cache.go index d526ecb0a6b..c110944f624 100644 --- a/pkg/scraper/cache.go +++ b/pkg/scraper/cache.go @@ -15,8 +15,6 @@ import ( 
"github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/match" "github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/scene" - "github.com/stashapp/stash/pkg/tag" "github.com/stashapp/stash/pkg/txn" ) @@ -53,27 +51,27 @@ func isCDPPathWS(c GlobalConfig) bool { } type SceneFinder interface { - scene.IDFinder + models.SceneGetter models.URLLoader } type PerformerFinder interface { - match.PerformerAutoTagQueryer + models.PerformerAutoTagQueryer match.PerformerFinder } type StudioFinder interface { - match.StudioAutoTagQueryer - match.StudioFinder + models.StudioAutoTagQueryer + FindByStashID(ctx context.Context, stashID models.StashID) ([]*models.Studio, error) } type TagFinder interface { - match.TagAutoTagQueryer - tag.Queryer + models.TagGetter + models.TagAutoTagQueryer } type GalleryFinder interface { - Find(ctx context.Context, id int) (*models.Gallery, error) + models.GalleryGetter models.FileLoader } diff --git a/pkg/scraper/postprocessing.go b/pkg/scraper/postprocessing.go index e2d404d7c19..e504e4d1cac 100644 --- a/pkg/scraper/postprocessing.go +++ b/pkg/scraper/postprocessing.go @@ -6,7 +6,6 @@ import ( "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/match" "github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/tag" "github.com/stashapp/stash/pkg/txn" ) @@ -201,7 +200,7 @@ func (c Cache) postScrapeGallery(ctx context.Context, g ScrapedGallery) (Scraped return g, nil } -func postProcessTags(ctx context.Context, tqb tag.Queryer, scrapedTags []*models.ScrapedTag) ([]*models.ScrapedTag, error) { +func postProcessTags(ctx context.Context, tqb models.TagQueryer, scrapedTags []*models.ScrapedTag) ([]*models.ScrapedTag, error) { var ret []*models.ScrapedTag for _, t := range scrapedTags { diff --git a/pkg/scraper/stashbox/stash_box.go b/pkg/scraper/stashbox/stash_box.go index 6a5df09e9d6..7abff7032e2 100644 --- a/pkg/scraper/stashbox/stash_box.go +++ b/pkg/scraper/stashbox/stash_box.go @@ -19,41 
+19,39 @@ import ( "github.com/Yamashou/gqlgenc/graphqljson" "github.com/gofrs/uuid" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/match" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/scraper" "github.com/stashapp/stash/pkg/scraper/stashbox/graphql" "github.com/stashapp/stash/pkg/sliceutil/stringslice" - "github.com/stashapp/stash/pkg/studio" - "github.com/stashapp/stash/pkg/tag" "github.com/stashapp/stash/pkg/txn" "github.com/stashapp/stash/pkg/utils" ) type SceneReader interface { - Find(ctx context.Context, id int) (*models.Scene, error) + models.SceneGetter models.StashIDLoader models.VideoFileLoader } type PerformerReader interface { + models.PerformerGetter match.PerformerFinder - Find(ctx context.Context, id int) (*models.Performer, error) - FindBySceneID(ctx context.Context, sceneID int) ([]*models.Performer, error) models.AliasLoader models.StashIDLoader + FindBySceneID(ctx context.Context, sceneID int) ([]*models.Performer, error) GetImage(ctx context.Context, performerID int) ([]byte, error) } type StudioReader interface { + models.StudioGetter match.StudioFinder - studio.Finder models.StashIDLoader } + type TagFinder interface { - tag.Queryer + models.TagQueryer FindBySceneID(ctx context.Context, sceneID int) ([]*models.Tag, error) } @@ -151,7 +149,7 @@ func (c Client) FindStashBoxScenesByFingerprints(ctx context.Context, ids []int) var sceneFPs []*graphql.FingerprintQueryInput for _, f := range scene.Files.List() { - checksum := f.Fingerprints.GetString(file.FingerprintTypeMD5) + checksum := f.Fingerprints.GetString(models.FingerprintTypeMD5) if checksum != "" { sceneFPs = append(sceneFPs, &graphql.FingerprintQueryInput{ Hash: checksum, @@ -159,7 +157,7 @@ func (c Client) FindStashBoxScenesByFingerprints(ctx context.Context, ids []int) }) } - oshash := f.Fingerprints.GetString(file.FingerprintTypeOshash) + oshash := 
f.Fingerprints.GetString(models.FingerprintTypeOshash) if oshash != "" { sceneFPs = append(sceneFPs, &graphql.FingerprintQueryInput{ Hash: oshash, @@ -167,7 +165,7 @@ func (c Client) FindStashBoxScenesByFingerprints(ctx context.Context, ids []int) }) } - phash := f.Fingerprints.GetInt64(file.FingerprintTypePhash) + phash := f.Fingerprints.GetInt64(models.FingerprintTypePhash) if phash != 0 { phashStr := utils.PhashToString(phash) sceneFPs = append(sceneFPs, &graphql.FingerprintQueryInput{ @@ -279,7 +277,7 @@ func (c Client) SubmitStashBoxFingerprints(ctx context.Context, sceneIDs []strin duration := f.Duration if duration != 0 { - if checksum := f.Fingerprints.GetString(file.FingerprintTypeMD5); checksum != "" { + if checksum := f.Fingerprints.GetString(models.FingerprintTypeMD5); checksum != "" { fingerprint := graphql.FingerprintInput{ Hash: checksum, Algorithm: graphql.FingerprintAlgorithmMd5, @@ -291,7 +289,7 @@ func (c Client) SubmitStashBoxFingerprints(ctx context.Context, sceneIDs []strin }) } - if oshash := f.Fingerprints.GetString(file.FingerprintTypeOshash); oshash != "" { + if oshash := f.Fingerprints.GetString(models.FingerprintTypeOshash); oshash != "" { fingerprint := graphql.FingerprintInput{ Hash: oshash, Algorithm: graphql.FingerprintAlgorithmOshash, @@ -303,7 +301,7 @@ func (c Client) SubmitStashBoxFingerprints(ctx context.Context, sceneIDs []strin }) } - if phash := f.Fingerprints.GetInt64(file.FingerprintTypePhash); phash != 0 { + if phash := f.Fingerprints.GetInt64(models.FingerprintTypePhash); phash != 0 { fingerprint := graphql.FingerprintInput{ Hash: utils.PhashToString(phash), Algorithm: graphql.FingerprintAlgorithmPhash, @@ -979,7 +977,7 @@ func (c Client) SubmitSceneDraft(ctx context.Context, scene *models.Scene, endpo duration := f.Duration if duration != 0 { - if oshash := f.Fingerprints.GetString(file.FingerprintTypeOshash); oshash != "" { + if oshash := f.Fingerprints.GetString(models.FingerprintTypeOshash); oshash != "" { fingerprint 
:= graphql.FingerprintInput{ Hash: oshash, Algorithm: graphql.FingerprintAlgorithmOshash, @@ -988,7 +986,7 @@ func (c Client) SubmitSceneDraft(ctx context.Context, scene *models.Scene, endpo fingerprints = appendFingerprintUnique(fingerprints, &fingerprint) } - if checksum := f.Fingerprints.GetString(file.FingerprintTypeMD5); checksum != "" { + if checksum := f.Fingerprints.GetString(models.FingerprintTypeMD5); checksum != "" { fingerprint := graphql.FingerprintInput{ Hash: checksum, Algorithm: graphql.FingerprintAlgorithmMd5, @@ -997,7 +995,7 @@ func (c Client) SubmitSceneDraft(ctx context.Context, scene *models.Scene, endpo fingerprints = appendFingerprintUnique(fingerprints, &fingerprint) } - if phash := f.Fingerprints.GetInt64(file.FingerprintTypePhash); phash != 0 { + if phash := f.Fingerprints.GetInt64(models.FingerprintTypePhash); phash != 0 { fingerprint := graphql.FingerprintInput{ Hash: utils.PhashToString(phash), Algorithm: graphql.FingerprintAlgorithmPhash, diff --git a/pkg/sqlite/anonymise.go b/pkg/sqlite/anonymise.go index d8e6d99d6ad..7e4efd70299 100644 --- a/pkg/sqlite/anonymise.go +++ b/pkg/sqlite/anonymise.go @@ -368,7 +368,6 @@ func (db *Anonymiser) anonymiseImages(ctx context.Context) error { query := dialect.From(table).Select( table.Col(idColumn), table.Col("title"), - table.Col("url"), ).Where(table.Col(idColumn).Gt(lastID)).Limit(1000) gotSome = false @@ -378,20 +377,17 @@ func (db *Anonymiser) anonymiseImages(ctx context.Context) error { var ( id int title sql.NullString - url sql.NullString ) if err := rows.Scan( &id, &title, - &url, ); err != nil { return err } set := goqu.Record{} db.obfuscateNullString(set, "title", title) - db.obfuscateNullString(set, "url", url) if len(set) > 0 { stmt := dialect.Update(table).Set(set).Where(table.Col(idColumn).Eq(id)) @@ -416,6 +412,10 @@ func (db *Anonymiser) anonymiseImages(ctx context.Context) error { } } + if err := db.anonymiseURLs(ctx, goqu.T(imagesURLsTable), "image_id"); err != nil { + return 
err + } + return nil } diff --git a/pkg/sqlite/database.go b/pkg/sqlite/database.go index 40a2555fd68..f5291ca1a72 100644 --- a/pkg/sqlite/database.go +++ b/pkg/sqlite/database.go @@ -33,7 +33,7 @@ const ( dbConnTimeout = 30 ) -var appSchemaVersion uint = 48 +var appSchemaVersion uint = 50 //go:embed migrations/*.sql var migrationsBox embed.FS @@ -74,10 +74,10 @@ type Database struct { Scene *SceneStore SceneMarker *SceneMarkerStore Performer *PerformerStore + SavedFilter *SavedFilterStore Studio *StudioStore Tag *TagStore Movie *MovieStore - SavedFilter *SavedFilterStore db *sqlx.DB dbPath string diff --git a/pkg/sqlite/file.go b/pkg/sqlite/file.go index 760a7746558..2113aad13fd 100644 --- a/pkg/sqlite/file.go +++ b/pkg/sqlite/file.go @@ -13,7 +13,6 @@ import ( "github.com/doug-martin/goqu/v9" "github.com/doug-martin/goqu/v9/exp" "github.com/jmoiron/sqlx" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "gopkg.in/guregu/null.v4" ) @@ -31,17 +30,17 @@ const ( ) type basicFileRow struct { - ID file.ID `db:"id" goqu:"skipinsert"` - Basename string `db:"basename"` - ZipFileID null.Int `db:"zip_file_id"` - ParentFolderID file.FolderID `db:"parent_folder_id"` - Size int64 `db:"size"` - ModTime Timestamp `db:"mod_time"` - CreatedAt Timestamp `db:"created_at"` - UpdatedAt Timestamp `db:"updated_at"` + ID models.FileID `db:"id" goqu:"skipinsert"` + Basename string `db:"basename"` + ZipFileID null.Int `db:"zip_file_id"` + ParentFolderID models.FolderID `db:"parent_folder_id"` + Size int64 `db:"size"` + ModTime Timestamp `db:"mod_time"` + CreatedAt Timestamp `db:"created_at"` + UpdatedAt Timestamp `db:"updated_at"` } -func (r *basicFileRow) fromBasicFile(o file.BaseFile) { +func (r *basicFileRow) fromBasicFile(o models.BaseFile) { r.ID = o.ID r.Basename = o.Basename r.ZipFileID = nullIntFromFileIDPtr(o.ZipFileID) @@ -53,20 +52,20 @@ func (r *basicFileRow) fromBasicFile(o file.BaseFile) { } type videoFileRow struct { - FileID file.ID 
`db:"file_id"` - Format string `db:"format"` - Width int `db:"width"` - Height int `db:"height"` - Duration float64 `db:"duration"` - VideoCodec string `db:"video_codec"` - AudioCodec string `db:"audio_codec"` - FrameRate float64 `db:"frame_rate"` - BitRate int64 `db:"bit_rate"` - Interactive bool `db:"interactive"` - InteractiveSpeed null.Int `db:"interactive_speed"` -} - -func (f *videoFileRow) fromVideoFile(ff file.VideoFile) { + FileID models.FileID `db:"file_id"` + Format string `db:"format"` + Width int `db:"width"` + Height int `db:"height"` + Duration float64 `db:"duration"` + VideoCodec string `db:"video_codec"` + AudioCodec string `db:"audio_codec"` + FrameRate float64 `db:"frame_rate"` + BitRate int64 `db:"bit_rate"` + Interactive bool `db:"interactive"` + InteractiveSpeed null.Int `db:"interactive_speed"` +} + +func (f *videoFileRow) fromVideoFile(ff models.VideoFile) { f.FileID = ff.ID f.Format = ff.Format f.Width = ff.Width @@ -81,13 +80,13 @@ func (f *videoFileRow) fromVideoFile(ff file.VideoFile) { } type imageFileRow struct { - FileID file.ID `db:"file_id"` - Format string `db:"format"` - Width int `db:"width"` - Height int `db:"height"` + FileID models.FileID `db:"file_id"` + Format string `db:"format"` + Width int `db:"width"` + Height int `db:"height"` } -func (f *imageFileRow) fromImageFile(ff file.ImageFile) { +func (f *imageFileRow) fromImageFile(ff models.ImageFile) { f.FileID = ff.ID f.Format = ff.Format f.Width = ff.Width @@ -110,8 +109,8 @@ type videoFileQueryRow struct { InteractiveSpeed null.Int `db:"interactive_speed"` } -func (f *videoFileQueryRow) resolve() *file.VideoFile { - return &file.VideoFile{ +func (f *videoFileQueryRow) resolve() *models.VideoFile { + return &models.VideoFile{ Format: f.Format.String, Width: int(f.Width.Int64), Height: int(f.Height.Int64), @@ -159,8 +158,8 @@ func (imageFileQueryRow) columns(table *table) []interface{} { } } -func (f *imageFileQueryRow) resolve() *file.ImageFile { - return &file.ImageFile{ 
+func (f *imageFileQueryRow) resolve() *models.ImageFile { + return &models.ImageFile{ Format: f.Format.String, Width: int(f.Width.Int64), Height: int(f.Height.Int64), @@ -186,15 +185,15 @@ type fileQueryRow struct { imageFileQueryRow } -func (r *fileQueryRow) resolve() file.File { - basic := &file.BaseFile{ - ID: file.ID(r.FileID.Int64), - DirEntry: file.DirEntry{ +func (r *fileQueryRow) resolve() models.File { + basic := &models.BaseFile{ + ID: models.FileID(r.FileID.Int64), + DirEntry: models.DirEntry{ ZipFileID: nullIntFileIDPtr(r.ZipFileID), ModTime: r.ModTime.Timestamp, }, Path: filepath.Join(r.FolderPath.String, r.Basename.String), - ParentFolderID: file.FolderID(r.ParentFolderID.Int64), + ParentFolderID: models.FolderID(r.ParentFolderID.Int64), Basename: r.Basename.String, Size: r.Size.Int64, CreatedAt: r.CreatedAt.Timestamp, @@ -202,14 +201,14 @@ func (r *fileQueryRow) resolve() file.File { } if basic.ZipFileID != nil && r.ZipFolderPath.Valid && r.ZipBasename.Valid { - basic.ZipFile = &file.BaseFile{ + basic.ZipFile = &models.BaseFile{ ID: *basic.ZipFileID, Path: filepath.Join(r.ZipFolderPath.String, r.ZipBasename.String), Basename: r.ZipBasename.String, } } - var ret file.File = basic + var ret models.File = basic if r.videoFileQueryRow.Format.Valid { vf := r.videoFileQueryRow.resolve() @@ -228,7 +227,7 @@ func (r *fileQueryRow) resolve() file.File { return ret } -func appendFingerprintsUnique(vs []file.Fingerprint, v ...file.Fingerprint) []file.Fingerprint { +func appendFingerprintsUnique(vs []models.Fingerprint, v ...models.Fingerprint) []models.Fingerprint { for _, vv := range v { found := false for _, vsv := range vs { @@ -245,7 +244,7 @@ func appendFingerprintsUnique(vs []file.Fingerprint, v ...file.Fingerprint) []fi return vs } -func (r *fileQueryRow) appendRelationships(i *file.BaseFile) { +func (r *fileQueryRow) appendRelationships(i *models.BaseFile) { if r.fingerprintQueryRow.valid() { i.Fingerprints = appendFingerprintsUnique(i.Fingerprints, 
r.fingerprintQueryRow.resolve()) } @@ -253,16 +252,16 @@ func (r *fileQueryRow) appendRelationships(i *file.BaseFile) { type fileQueryRows []fileQueryRow -func (r fileQueryRows) resolve() []file.File { - var ret []file.File - var last file.File - var lastID file.ID +func (r fileQueryRows) resolve() []models.File { + var ret []models.File + var last models.File + var lastID models.FileID for _, row := range r { - if last == nil || lastID != file.ID(row.FileID.Int64) { + if last == nil || lastID != models.FileID(row.FileID.Int64) { f := row.resolve() last = f - lastID = file.ID(row.FileID.Int64) + lastID = models.FileID(row.FileID.Int64) ret = append(ret, last) continue } @@ -295,7 +294,7 @@ func (qb *FileStore) table() exp.IdentifierExpression { return qb.tableMgr.table } -func (qb *FileStore) Create(ctx context.Context, f file.File) error { +func (qb *FileStore) Create(ctx context.Context, f models.File) error { var r basicFileRow r.fromBasicFile(*f.Base()) @@ -304,15 +303,15 @@ func (qb *FileStore) Create(ctx context.Context, f file.File) error { return err } - fileID := file.ID(id) + fileID := models.FileID(id) // create extended stuff here switch ef := f.(type) { - case *file.VideoFile: + case *models.VideoFile: if err := qb.createVideoFile(ctx, fileID, *ef); err != nil { return err } - case *file.ImageFile: + case *models.ImageFile: if err := qb.createImageFile(ctx, fileID, *ef); err != nil { return err } @@ -333,7 +332,7 @@ func (qb *FileStore) Create(ctx context.Context, f file.File) error { return nil } -func (qb *FileStore) Update(ctx context.Context, f file.File) error { +func (qb *FileStore) Update(ctx context.Context, f models.File) error { var r basicFileRow r.fromBasicFile(*f.Base()) @@ -345,11 +344,11 @@ func (qb *FileStore) Update(ctx context.Context, f file.File) error { // create extended stuff here switch ef := f.(type) { - case *file.VideoFile: + case *models.VideoFile: if err := qb.updateOrCreateVideoFile(ctx, id, *ef); err != nil { return err } 
- case *file.ImageFile: + case *models.ImageFile: if err := qb.updateOrCreateImageFile(ctx, id, *ef); err != nil { return err } @@ -362,11 +361,11 @@ func (qb *FileStore) Update(ctx context.Context, f file.File) error { return nil } -func (qb *FileStore) Destroy(ctx context.Context, id file.ID) error { +func (qb *FileStore) Destroy(ctx context.Context, id models.FileID) error { return qb.tableMgr.destroyExisting(ctx, []int{int(id)}) } -func (qb *FileStore) createVideoFile(ctx context.Context, id file.ID, f file.VideoFile) error { +func (qb *FileStore) createVideoFile(ctx context.Context, id models.FileID, f models.VideoFile) error { var r videoFileRow r.fromVideoFile(f) r.FileID = id @@ -377,7 +376,7 @@ func (qb *FileStore) createVideoFile(ctx context.Context, id file.ID, f file.Vid return nil } -func (qb *FileStore) updateOrCreateVideoFile(ctx context.Context, id file.ID, f file.VideoFile) error { +func (qb *FileStore) updateOrCreateVideoFile(ctx context.Context, id models.FileID, f models.VideoFile) error { exists, err := videoFileTableMgr.idExists(ctx, id) if err != nil { return err @@ -397,7 +396,7 @@ func (qb *FileStore) updateOrCreateVideoFile(ctx context.Context, id file.ID, f return nil } -func (qb *FileStore) createImageFile(ctx context.Context, id file.ID, f file.ImageFile) error { +func (qb *FileStore) createImageFile(ctx context.Context, id models.FileID, f models.ImageFile) error { var r imageFileRow r.fromImageFile(f) r.FileID = id @@ -408,7 +407,7 @@ func (qb *FileStore) createImageFile(ctx context.Context, id file.ID, f file.Ima return nil } -func (qb *FileStore) updateOrCreateImageFile(ctx context.Context, id file.ID, f file.ImageFile) error { +func (qb *FileStore) updateOrCreateImageFile(ctx context.Context, id models.FileID, f models.ImageFile) error { exists, err := imageFileTableMgr.idExists(ctx, id) if err != nil { return err @@ -515,7 +514,7 @@ func (qb *FileStore) countDataset() *goqu.SelectDataset { ) } -func (qb *FileStore) get(ctx 
context.Context, q *goqu.SelectDataset) (file.File, error) { +func (qb *FileStore) get(ctx context.Context, q *goqu.SelectDataset) (models.File, error) { ret, err := qb.getMany(ctx, q) if err != nil { return nil, err @@ -528,7 +527,7 @@ func (qb *FileStore) get(ctx context.Context, q *goqu.SelectDataset) (file.File, return ret[0], nil } -func (qb *FileStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]file.File, error) { +func (qb *FileStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]models.File, error) { const single = false var rows fileQueryRows if err := queryFunc(ctx, q, single, func(r *sqlx.Rows) error { @@ -546,8 +545,8 @@ func (qb *FileStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]file return rows.resolve(), nil } -func (qb *FileStore) Find(ctx context.Context, ids ...file.ID) ([]file.File, error) { - var files []file.File +func (qb *FileStore) Find(ctx context.Context, ids ...models.FileID) ([]models.File, error) { + var files []models.File for _, id := range ids { file, err := qb.find(ctx, id) if err != nil { @@ -564,7 +563,7 @@ func (qb *FileStore) Find(ctx context.Context, ids ...file.ID) ([]file.File, err return files, nil } -func (qb *FileStore) find(ctx context.Context, id file.ID) (file.File, error) { +func (qb *FileStore) find(ctx context.Context, id models.FileID) (models.File, error) { q := qb.selectDataset().Where(qb.tableMgr.byID(id)) ret, err := qb.get(ctx, q) @@ -576,7 +575,7 @@ func (qb *FileStore) find(ctx context.Context, id file.ID) (file.File, error) { } // FindByPath returns the first file that matches the given path. Wildcard characters are supported. 
-func (qb *FileStore) FindByPath(ctx context.Context, p string) (file.File, error) { +func (qb *FileStore) FindByPath(ctx context.Context, p string) (models.File, error) { ret, err := qb.FindAllByPath(ctx, p) @@ -593,7 +592,7 @@ func (qb *FileStore) FindByPath(ctx context.Context, p string) (file.File, error // FindAllByPath returns all the files that match the given path. // Wildcard characters are supported. -func (qb *FileStore) FindAllByPath(ctx context.Context, p string) ([]file.File, error) { +func (qb *FileStore) FindAllByPath(ctx context.Context, p string) ([]models.File, error) { // separate basename from path basename := filepath.Base(p) dirName := filepath.Dir(p) @@ -646,7 +645,7 @@ func (qb *FileStore) allInPaths(q *goqu.SelectDataset, p []string) *goqu.SelectD // FindAllByPaths returns the all files that are within any of the given paths. // Returns all if limit is < 0. // Returns all files if p is empty. -func (qb *FileStore) FindAllInPaths(ctx context.Context, p []string, limit, offset int) ([]file.File, error) { +func (qb *FileStore) FindAllInPaths(ctx context.Context, p []string, limit, offset int) ([]models.File, error) { table := qb.table() folderTable := folderTableMgr.table @@ -680,7 +679,7 @@ func (qb *FileStore) CountAllInPaths(ctx context.Context, p []string) (int, erro return count(ctx, q) } -func (qb *FileStore) findBySubquery(ctx context.Context, sq *goqu.SelectDataset) ([]file.File, error) { +func (qb *FileStore) findBySubquery(ctx context.Context, sq *goqu.SelectDataset) ([]models.File, error) { table := qb.table() q := qb.selectDataset().Prepared(true).Where( @@ -692,7 +691,7 @@ func (qb *FileStore) findBySubquery(ctx context.Context, sq *goqu.SelectDataset) return qb.getMany(ctx, q) } -func (qb *FileStore) FindByFingerprint(ctx context.Context, fp file.Fingerprint) ([]file.File, error) { +func (qb *FileStore) FindByFingerprint(ctx context.Context, fp models.Fingerprint) ([]models.File, error) { fingerprintTable := 
fingerprintTableMgr.table fingerprints := fingerprintTable.As("fp") @@ -705,7 +704,7 @@ func (qb *FileStore) FindByFingerprint(ctx context.Context, fp file.Fingerprint) return qb.findBySubquery(ctx, sq) } -func (qb *FileStore) FindByZipFileID(ctx context.Context, zipFileID file.ID) ([]file.File, error) { +func (qb *FileStore) FindByZipFileID(ctx context.Context, zipFileID models.FileID) ([]models.File, error) { table := qb.table() q := qb.selectDataset().Prepared(true).Where( @@ -716,7 +715,7 @@ func (qb *FileStore) FindByZipFileID(ctx context.Context, zipFileID file.ID) ([] } // FindByFileInfo finds files that match the base name, size, and mod time of the given file. -func (qb *FileStore) FindByFileInfo(ctx context.Context, info fs.FileInfo, size int64) ([]file.File, error) { +func (qb *FileStore) FindByFileInfo(ctx context.Context, info fs.FileInfo, size int64) ([]models.File, error) { table := qb.table() modTime := info.ModTime().Format(time.RFC3339) @@ -730,7 +729,7 @@ func (qb *FileStore) FindByFileInfo(ctx context.Context, info fs.FileInfo, size return qb.getMany(ctx, q) } -func (qb *FileStore) CountByFolderID(ctx context.Context, folderID file.FolderID) (int, error) { +func (qb *FileStore) CountByFolderID(ctx context.Context, folderID models.FolderID) (int, error) { table := qb.table() q := qb.countDataset().Prepared(true).Where( @@ -740,7 +739,7 @@ func (qb *FileStore) CountByFolderID(ctx context.Context, folderID file.FolderID return count(ctx, q) } -func (qb *FileStore) IsPrimary(ctx context.Context, fileID file.ID) (bool, error) { +func (qb *FileStore) IsPrimary(ctx context.Context, fileID models.FileID) (bool, error) { joinTables := []exp.IdentifierExpression{ scenesFilesJoinTable, galleriesFilesJoinTable, @@ -867,9 +866,9 @@ func (qb *FileStore) Query(ctx context.Context, options models.FileQueryOptions) return nil, fmt.Errorf("error finding IDs: %w", err) } - result.IDs = make([]file.ID, len(idsResult)) + result.IDs = make([]models.FileID, 
len(idsResult)) for i, id := range idsResult { - result.IDs[i] = file.ID(id) + result.IDs[i] = models.FileID(id) } return result, nil @@ -929,10 +928,10 @@ func (qb *FileStore) captionRepository() *captionRepository { } } -func (qb *FileStore) GetCaptions(ctx context.Context, fileID file.ID) ([]*models.VideoCaption, error) { +func (qb *FileStore) GetCaptions(ctx context.Context, fileID models.FileID) ([]*models.VideoCaption, error) { return qb.captionRepository().get(ctx, fileID) } -func (qb *FileStore) UpdateCaptions(ctx context.Context, fileID file.ID, captions []*models.VideoCaption) error { +func (qb *FileStore) UpdateCaptions(ctx context.Context, fileID models.FileID, captions []*models.VideoCaption) error { return qb.captionRepository().replace(ctx, fileID, captions) } diff --git a/pkg/sqlite/file_test.go b/pkg/sqlite/file_test.go index 2bcbe42e956..766ffcc70b7 100644 --- a/pkg/sqlite/file_test.go +++ b/pkg/sqlite/file_test.go @@ -9,7 +9,7 @@ import ( "testing" "time" - "github.com/stashapp/stash/pkg/file" + "github.com/stashapp/stash/pkg/models" "github.com/stretchr/testify/assert" ) @@ -17,10 +17,10 @@ func getFilePath(folderIdx int, basename string) string { return filepath.Join(folderPaths[folderIdx], basename) } -func makeZipFileWithID(index int) file.File { +func makeZipFileWithID(index int) models.File { f := makeFile(index) - return &file.BaseFile{ + return &models.BaseFile{ ID: fileIDs[index], Basename: f.Base().Basename, Path: getFilePath(fileFolders[index], getFileBaseName(index)), @@ -49,13 +49,13 @@ func Test_fileFileStore_Create(t *testing.T) { tests := []struct { name string - newObject file.File + newObject models.File wantErr bool }{ { "full", - &file.BaseFile{ - DirEntry: file.DirEntry{ + &models.BaseFile{ + DirEntry: models.DirEntry{ ZipFileID: &fileIDs[fileIdxZip], ZipFile: makeZipFileWithID(fileIdxZip), ModTime: fileModTime, @@ -64,7 +64,7 @@ func Test_fileFileStore_Create(t *testing.T) { ParentFolderID: folderIDs[folderIdxWithFiles], 
Basename: basename, Size: size, - Fingerprints: []file.Fingerprint{ + Fingerprints: []models.Fingerprint{ { Type: fingerprintType, Fingerprint: fingerprintValue, @@ -77,9 +77,9 @@ func Test_fileFileStore_Create(t *testing.T) { }, { "video file", - &file.VideoFile{ - BaseFile: &file.BaseFile{ - DirEntry: file.DirEntry{ + &models.VideoFile{ + BaseFile: &models.BaseFile{ + DirEntry: models.DirEntry{ ZipFileID: &fileIDs[fileIdxZip], ZipFile: makeZipFileWithID(fileIdxZip), ModTime: fileModTime, @@ -88,7 +88,7 @@ func Test_fileFileStore_Create(t *testing.T) { ParentFolderID: folderIDs[folderIdxWithFiles], Basename: basename, Size: size, - Fingerprints: []file.Fingerprint{ + Fingerprints: []models.Fingerprint{ { Type: fingerprintType, Fingerprint: fingerprintValue, @@ -110,9 +110,9 @@ func Test_fileFileStore_Create(t *testing.T) { }, { "image file", - &file.ImageFile{ - BaseFile: &file.BaseFile{ - DirEntry: file.DirEntry{ + &models.ImageFile{ + BaseFile: &models.BaseFile{ + DirEntry: models.DirEntry{ ZipFileID: &fileIDs[fileIdxZip], ZipFile: makeZipFileWithID(fileIdxZip), ModTime: fileModTime, @@ -121,7 +121,7 @@ func Test_fileFileStore_Create(t *testing.T) { ParentFolderID: folderIDs[folderIdxWithFiles], Basename: basename, Size: size, - Fingerprints: []file.Fingerprint{ + Fingerprints: []models.Fingerprint{ { Type: fingerprintType, Fingerprint: fingerprintValue, @@ -138,15 +138,15 @@ func Test_fileFileStore_Create(t *testing.T) { }, { "duplicate path", - &file.BaseFile{ - DirEntry: file.DirEntry{ + &models.BaseFile{ + DirEntry: models.DirEntry{ ModTime: fileModTime, }, Path: getFilePath(folderIdxWithFiles, getFileBaseName(fileIdxZip)), ParentFolderID: folderIDs[folderIdxWithFiles], Basename: getFileBaseName(fileIdxZip), Size: size, - Fingerprints: []file.Fingerprint{ + Fingerprints: []models.Fingerprint{ { Type: fingerprintType, Fingerprint: fingerprintValue, @@ -159,22 +159,22 @@ func Test_fileFileStore_Create(t *testing.T) { }, { "empty basename", - &file.BaseFile{ + 
&models.BaseFile{ ParentFolderID: folderIDs[folderIdxWithFiles], }, true, }, { "missing folder id", - &file.BaseFile{ + &models.BaseFile{ Basename: basename, }, true, }, { "invalid folder id", - &file.BaseFile{ - DirEntry: file.DirEntry{}, + &models.BaseFile{ + DirEntry: models.DirEntry{}, ParentFolderID: invalidFolderID, Basename: basename, }, @@ -182,8 +182,8 @@ func Test_fileFileStore_Create(t *testing.T) { }, { "invalid zip file id", - &file.BaseFile{ - DirEntry: file.DirEntry{ + &models.BaseFile{ + DirEntry: models.DirEntry{ ZipFileID: &invalidFileID, }, Basename: basename, @@ -210,15 +210,15 @@ func Test_fileFileStore_Create(t *testing.T) { assert.NotZero(s.Base().ID) - var copy file.File + var copy models.File switch t := s.(type) { - case *file.BaseFile: + case *models.BaseFile: v := *t copy = &v - case *file.VideoFile: + case *models.VideoFile: v := *t copy = &v - case *file.ImageFile: + case *models.ImageFile: v := *t copy = &v } @@ -266,14 +266,14 @@ func Test_fileStore_Update(t *testing.T) { tests := []struct { name string - updatedObject file.File + updatedObject models.File wantErr bool }{ { "full", - &file.BaseFile{ + &models.BaseFile{ ID: fileIDs[fileIdxInZip], - DirEntry: file.DirEntry{ + DirEntry: models.DirEntry{ ZipFileID: &fileIDs[fileIdxZip], ZipFile: makeZipFileWithID(fileIdxZip), ModTime: fileModTime, @@ -282,7 +282,7 @@ func Test_fileStore_Update(t *testing.T) { ParentFolderID: folderIDs[folderIdxWithFiles], Basename: basename, Size: size, - Fingerprints: []file.Fingerprint{ + Fingerprints: []models.Fingerprint{ { Type: fingerprintType, Fingerprint: fingerprintValue, @@ -295,10 +295,10 @@ func Test_fileStore_Update(t *testing.T) { }, { "video file", - &file.VideoFile{ - BaseFile: &file.BaseFile{ + &models.VideoFile{ + BaseFile: &models.BaseFile{ ID: fileIDs[fileIdxStartVideoFiles], - DirEntry: file.DirEntry{ + DirEntry: models.DirEntry{ ZipFileID: &fileIDs[fileIdxZip], ZipFile: makeZipFileWithID(fileIdxZip), ModTime: fileModTime, @@ -307,7 
+307,7 @@ func Test_fileStore_Update(t *testing.T) { ParentFolderID: folderIDs[folderIdxWithFiles], Basename: basename, Size: size, - Fingerprints: []file.Fingerprint{ + Fingerprints: []models.Fingerprint{ { Type: fingerprintType, Fingerprint: fingerprintValue, @@ -329,10 +329,10 @@ func Test_fileStore_Update(t *testing.T) { }, { "image file", - &file.ImageFile{ - BaseFile: &file.BaseFile{ + &models.ImageFile{ + BaseFile: &models.BaseFile{ ID: fileIDs[fileIdxStartImageFiles], - DirEntry: file.DirEntry{ + DirEntry: models.DirEntry{ ZipFileID: &fileIDs[fileIdxZip], ZipFile: makeZipFileWithID(fileIdxZip), ModTime: fileModTime, @@ -341,7 +341,7 @@ func Test_fileStore_Update(t *testing.T) { ParentFolderID: folderIDs[folderIdxWithFiles], Basename: basename, Size: size, - Fingerprints: []file.Fingerprint{ + Fingerprints: []models.Fingerprint{ { Type: fingerprintType, Fingerprint: fingerprintValue, @@ -358,16 +358,16 @@ func Test_fileStore_Update(t *testing.T) { }, { "duplicate path", - &file.BaseFile{ + &models.BaseFile{ ID: fileIDs[fileIdxInZip], - DirEntry: file.DirEntry{ + DirEntry: models.DirEntry{ ModTime: fileModTime, }, Path: getFilePath(folderIdxWithFiles, getFileBaseName(fileIdxZip)), ParentFolderID: folderIDs[folderIdxWithFiles], Basename: getFileBaseName(fileIdxZip), Size: size, - Fingerprints: []file.Fingerprint{ + Fingerprints: []models.Fingerprint{ { Type: fingerprintType, Fingerprint: fingerprintValue, @@ -380,7 +380,7 @@ func Test_fileStore_Update(t *testing.T) { }, { "clear zip", - &file.BaseFile{ + &models.BaseFile{ ID: fileIDs[fileIdxInZip], Path: getFilePath(folderIdxWithFiles, getFileBaseName(fileIdxZip)+".renamed"), Basename: getFileBaseName(fileIdxZip) + ".renamed", @@ -390,7 +390,7 @@ func Test_fileStore_Update(t *testing.T) { }, { "clear folder", - &file.BaseFile{ + &models.BaseFile{ ID: fileIDs[fileIdxZip], Path: basename, }, @@ -398,7 +398,7 @@ func Test_fileStore_Update(t *testing.T) { }, { "invalid parent folder id", - &file.BaseFile{ + 
&models.BaseFile{ ID: fileIDs[fileIdxZip], Path: basename, ParentFolderID: invalidFolderID, @@ -407,10 +407,10 @@ func Test_fileStore_Update(t *testing.T) { }, { "invalid zip file id", - &file.BaseFile{ + &models.BaseFile{ ID: fileIDs[fileIdxZip], Path: basename, - DirEntry: file.DirEntry{ + DirEntry: models.DirEntry{ ZipFileID: &invalidFileID, }, ParentFolderID: folderIDs[folderIdxWithFiles], @@ -450,7 +450,7 @@ func Test_fileStore_Update(t *testing.T) { } } -func makeFileWithID(index int) file.File { +func makeFileWithID(index int) models.File { ret := makeFile(index) ret.Base().Path = getFilePath(fileFolders[index], getFileBaseName(index)) ret.Base().ID = fileIDs[index] @@ -461,8 +461,8 @@ func makeFileWithID(index int) file.File { func Test_fileStore_Find(t *testing.T) { tests := []struct { name string - id file.ID - want file.File + id models.FileID + want models.File wantErr bool }{ { @@ -473,7 +473,7 @@ func Test_fileStore_Find(t *testing.T) { }, { "invalid", - file.ID(invalidID), + models.FileID(invalidID), nil, true, }, @@ -529,7 +529,7 @@ func Test_FileStore_FindByPath(t *testing.T) { tests := []struct { name string path string - want file.File + want models.File wantErr bool }{ { @@ -565,31 +565,31 @@ func Test_FileStore_FindByPath(t *testing.T) { func TestFileStore_FindByFingerprint(t *testing.T) { tests := []struct { name string - fp file.Fingerprint - want []file.File + fp models.Fingerprint + want []models.File wantErr bool }{ { "by MD5", - file.Fingerprint{ + models.Fingerprint{ Type: "MD5", Fingerprint: getPrefixedStringValue("file", fileIdxZip, "md5"), }, - []file.File{makeFileWithID(fileIdxZip)}, + []models.File{makeFileWithID(fileIdxZip)}, false, }, { "by OSHASH", - file.Fingerprint{ + models.Fingerprint{ Type: "OSHASH", Fingerprint: getPrefixedStringValue("file", fileIdxZip, "oshash"), }, - []file.File{makeFileWithID(fileIdxZip)}, + []models.File{makeFileWithID(fileIdxZip)}, false, }, { "non-existing", - file.Fingerprint{ + models.Fingerprint{ 
Type: "OSHASH", Fingerprint: "foo", }, @@ -617,7 +617,7 @@ func TestFileStore_FindByFingerprint(t *testing.T) { func TestFileStore_IsPrimary(t *testing.T) { tests := []struct { name string - fileID file.ID + fileID models.FileID want bool }{ { diff --git a/pkg/sqlite/fingerprint.go b/pkg/sqlite/fingerprint.go index 0f7c36d1274..49bae54caf9 100644 --- a/pkg/sqlite/fingerprint.go +++ b/pkg/sqlite/fingerprint.go @@ -6,7 +6,7 @@ import ( "github.com/doug-martin/goqu/v9" "github.com/doug-martin/goqu/v9/exp" - "github.com/stashapp/stash/pkg/file" + "github.com/stashapp/stash/pkg/models" "gopkg.in/guregu/null.v4" ) @@ -23,8 +23,8 @@ func (r fingerprintQueryRow) valid() bool { return r.Type.Valid } -func (r *fingerprintQueryRow) resolve() file.Fingerprint { - return file.Fingerprint{ +func (r *fingerprintQueryRow) resolve() models.Fingerprint { + return models.Fingerprint{ Type: r.Type.String, Fingerprint: r.Fingerprint, } @@ -45,7 +45,7 @@ var FingerprintReaderWriter = &fingerprintQueryBuilder{ tableMgr: fingerprintTableMgr, } -func (qb *fingerprintQueryBuilder) insert(ctx context.Context, fileID file.ID, f file.Fingerprint) error { +func (qb *fingerprintQueryBuilder) insert(ctx context.Context, fileID models.FileID, f models.Fingerprint) error { table := qb.table() q := dialect.Insert(table).Cols(fileIDColumn, "type", "fingerprint").Vals( goqu.Vals{fileID, f.Type, f.Fingerprint}, @@ -58,7 +58,7 @@ func (qb *fingerprintQueryBuilder) insert(ctx context.Context, fileID file.ID, f return nil } -func (qb *fingerprintQueryBuilder) insertJoins(ctx context.Context, fileID file.ID, f []file.Fingerprint) error { +func (qb *fingerprintQueryBuilder) insertJoins(ctx context.Context, fileID models.FileID, f []models.Fingerprint) error { for _, ff := range f { if err := qb.insert(ctx, fileID, ff); err != nil { return err @@ -68,7 +68,7 @@ func (qb *fingerprintQueryBuilder) insertJoins(ctx context.Context, fileID file. 
return nil } -func (qb *fingerprintQueryBuilder) replaceJoins(ctx context.Context, fileID file.ID, f []file.Fingerprint) error { +func (qb *fingerprintQueryBuilder) replaceJoins(ctx context.Context, fileID models.FileID, f []models.Fingerprint) error { if err := qb.destroy(ctx, []int{int(fileID)}); err != nil { return err } diff --git a/pkg/sqlite/folder.go b/pkg/sqlite/folder.go index ff1e8a2c559..26cbf896252 100644 --- a/pkg/sqlite/folder.go +++ b/pkg/sqlite/folder.go @@ -10,23 +10,23 @@ import ( "github.com/doug-martin/goqu/v9" "github.com/doug-martin/goqu/v9/exp" "github.com/jmoiron/sqlx" - "github.com/stashapp/stash/pkg/file" + "github.com/stashapp/stash/pkg/models" "gopkg.in/guregu/null.v4" ) const folderTable = "folders" type folderRow struct { - ID file.FolderID `db:"id" goqu:"skipinsert"` - Path string `db:"path"` - ZipFileID null.Int `db:"zip_file_id"` - ParentFolderID null.Int `db:"parent_folder_id"` - ModTime Timestamp `db:"mod_time"` - CreatedAt Timestamp `db:"created_at"` - UpdatedAt Timestamp `db:"updated_at"` + ID models.FolderID `db:"id" goqu:"skipinsert"` + Path string `db:"path"` + ZipFileID null.Int `db:"zip_file_id"` + ParentFolderID null.Int `db:"parent_folder_id"` + ModTime Timestamp `db:"mod_time"` + CreatedAt Timestamp `db:"created_at"` + UpdatedAt Timestamp `db:"updated_at"` } -func (r *folderRow) fromFolder(o file.Folder) { +func (r *folderRow) fromFolder(o models.Folder) { r.ID = o.ID r.Path = o.Path r.ZipFileID = nullIntFromFileIDPtr(o.ZipFileID) @@ -43,10 +43,10 @@ type folderQueryRow struct { ZipFolderPath null.String `db:"zip_folder_path"` } -func (r *folderQueryRow) resolve() *file.Folder { - ret := &file.Folder{ +func (r *folderQueryRow) resolve() *models.Folder { + ret := &models.Folder{ ID: r.ID, - DirEntry: file.DirEntry{ + DirEntry: models.DirEntry{ ZipFileID: nullIntFileIDPtr(r.ZipFileID), ModTime: r.ModTime.Timestamp, }, @@ -57,7 +57,7 @@ func (r *folderQueryRow) resolve() *file.Folder { } if ret.ZipFileID != nil && 
r.ZipFolderPath.Valid && r.ZipBasename.Valid { - ret.ZipFile = &file.BaseFile{ + ret.ZipFile = &models.BaseFile{ ID: *ret.ZipFileID, Path: filepath.Join(r.ZipFolderPath.String, r.ZipBasename.String), Basename: r.ZipBasename.String, @@ -69,8 +69,8 @@ func (r *folderQueryRow) resolve() *file.Folder { type folderQueryRows []folderQueryRow -func (r folderQueryRows) resolve() []*file.Folder { - var ret []*file.Folder +func (r folderQueryRows) resolve() []*models.Folder { + var ret []*models.Folder for _, row := range r { f := row.resolve() @@ -97,7 +97,7 @@ func NewFolderStore() *FolderStore { } } -func (qb *FolderStore) Create(ctx context.Context, f *file.Folder) error { +func (qb *FolderStore) Create(ctx context.Context, f *models.Folder) error { var r folderRow r.fromFolder(*f) @@ -107,12 +107,12 @@ func (qb *FolderStore) Create(ctx context.Context, f *file.Folder) error { } // only assign id once we are successful - f.ID = file.FolderID(id) + f.ID = models.FolderID(id) return nil } -func (qb *FolderStore) Update(ctx context.Context, updatedObject *file.Folder) error { +func (qb *FolderStore) Update(ctx context.Context, updatedObject *models.Folder) error { var r folderRow r.fromFolder(*updatedObject) @@ -123,7 +123,7 @@ func (qb *FolderStore) Update(ctx context.Context, updatedObject *file.Folder) e return nil } -func (qb *FolderStore) Destroy(ctx context.Context, id file.FolderID) error { +func (qb *FolderStore) Destroy(ctx context.Context, id models.FolderID) error { return qb.tableMgr.destroyExisting(ctx, []int{int(id)}) } @@ -179,7 +179,7 @@ func (qb *FolderStore) countDataset() *goqu.SelectDataset { ) } -func (qb *FolderStore) get(ctx context.Context, q *goqu.SelectDataset) (*file.Folder, error) { +func (qb *FolderStore) get(ctx context.Context, q *goqu.SelectDataset) (*models.Folder, error) { ret, err := qb.getMany(ctx, q) if err != nil { return nil, err @@ -192,7 +192,7 @@ func (qb *FolderStore) get(ctx context.Context, q *goqu.SelectDataset) (*file.Fo return 
ret[0], nil } -func (qb *FolderStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]*file.Folder, error) { +func (qb *FolderStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]*models.Folder, error) { const single = false var rows folderQueryRows if err := queryFunc(ctx, q, single, func(r *sqlx.Rows) error { @@ -210,7 +210,7 @@ func (qb *FolderStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]*f return rows.resolve(), nil } -func (qb *FolderStore) Find(ctx context.Context, id file.FolderID) (*file.Folder, error) { +func (qb *FolderStore) Find(ctx context.Context, id models.FolderID) (*models.Folder, error) { q := qb.selectDataset().Where(qb.tableMgr.byID(id)) ret, err := qb.get(ctx, q) @@ -221,7 +221,7 @@ func (qb *FolderStore) Find(ctx context.Context, id file.FolderID) (*file.Folder return ret, nil } -func (qb *FolderStore) FindByPath(ctx context.Context, p string) (*file.Folder, error) { +func (qb *FolderStore) FindByPath(ctx context.Context, p string) (*models.Folder, error) { q := qb.selectDataset().Prepared(true).Where(qb.table().Col("path").Eq(p)) ret, err := qb.get(ctx, q) @@ -232,7 +232,7 @@ func (qb *FolderStore) FindByPath(ctx context.Context, p string) (*file.Folder, return ret, nil } -func (qb *FolderStore) FindByParentFolderID(ctx context.Context, parentFolderID file.FolderID) ([]*file.Folder, error) { +func (qb *FolderStore) FindByParentFolderID(ctx context.Context, parentFolderID models.FolderID) ([]*models.Folder, error) { q := qb.selectDataset().Where(qb.table().Col("parent_folder_id").Eq(int(parentFolderID))) ret, err := qb.getMany(ctx, q) @@ -261,7 +261,7 @@ func (qb *FolderStore) allInPaths(q *goqu.SelectDataset, p []string) *goqu.Selec // FindAllInPaths returns the all folders that are or are within any of the given paths. // Returns all if limit is < 0. // Returns all folders if p is empty. 
-func (qb *FolderStore) FindAllInPaths(ctx context.Context, p []string, limit, offset int) ([]*file.Folder, error) { +func (qb *FolderStore) FindAllInPaths(ctx context.Context, p []string, limit, offset int) ([]*models.Folder, error) { q := qb.selectDataset().Prepared(true) q = qb.allInPaths(q, p) @@ -300,7 +300,7 @@ func (qb *FolderStore) CountAllInPaths(ctx context.Context, p []string) (int, er // return qb.getMany(ctx, q) // } -func (qb *FolderStore) FindByZipFileID(ctx context.Context, zipFileID file.ID) ([]*file.Folder, error) { +func (qb *FolderStore) FindByZipFileID(ctx context.Context, zipFileID models.FileID) ([]*models.Folder, error) { table := qb.table() q := qb.selectDataset().Prepared(true).Where( diff --git a/pkg/sqlite/folder_test.go b/pkg/sqlite/folder_test.go index 71e45305a09..1d948d06368 100644 --- a/pkg/sqlite/folder_test.go +++ b/pkg/sqlite/folder_test.go @@ -9,13 +9,13 @@ import ( "testing" "time" - "github.com/stashapp/stash/pkg/file" + "github.com/stashapp/stash/pkg/models" "github.com/stretchr/testify/assert" ) var ( - invalidFolderID = file.FolderID(invalidID) - invalidFileID = file.ID(invalidID) + invalidFolderID = models.FolderID(invalidID) + invalidFileID = models.FileID(invalidID) ) func Test_FolderStore_Create(t *testing.T) { @@ -28,13 +28,13 @@ func Test_FolderStore_Create(t *testing.T) { tests := []struct { name string - newObject file.Folder + newObject models.Folder wantErr bool }{ { "full", - file.Folder{ - DirEntry: file.DirEntry{ + models.Folder{ + DirEntry: models.DirEntry{ ZipFileID: &fileIDs[fileIdxZip], ZipFile: makeZipFileWithID(fileIdxZip), ModTime: fileModTime, @@ -47,7 +47,7 @@ func Test_FolderStore_Create(t *testing.T) { }, { "invalid parent folder id", - file.Folder{ + models.Folder{ Path: path, ParentFolderID: &invalidFolderID, }, @@ -55,8 +55,8 @@ func Test_FolderStore_Create(t *testing.T) { }, { "invalid zip file id", - file.Folder{ - DirEntry: file.DirEntry{ + models.Folder{ + DirEntry: models.DirEntry{ ZipFileID: 
&invalidFileID, }, Path: path, @@ -109,14 +109,14 @@ func Test_FolderStore_Update(t *testing.T) { tests := []struct { name string - updatedObject *file.Folder + updatedObject *models.Folder wantErr bool }{ { "full", - &file.Folder{ + &models.Folder{ ID: folderIDs[folderIdxWithParentFolder], - DirEntry: file.DirEntry{ + DirEntry: models.DirEntry{ ZipFileID: &fileIDs[fileIdxZip], ZipFile: makeZipFileWithID(fileIdxZip), ModTime: fileModTime, @@ -129,7 +129,7 @@ func Test_FolderStore_Update(t *testing.T) { }, { "clear zip", - &file.Folder{ + &models.Folder{ ID: folderIDs[folderIdxInZip], Path: path, }, @@ -137,7 +137,7 @@ func Test_FolderStore_Update(t *testing.T) { }, { "clear folder", - &file.Folder{ + &models.Folder{ ID: folderIDs[folderIdxWithParentFolder], Path: path, }, @@ -145,7 +145,7 @@ func Test_FolderStore_Update(t *testing.T) { }, { "invalid parent folder id", - &file.Folder{ + &models.Folder{ ID: folderIDs[folderIdxWithParentFolder], Path: path, ParentFolderID: &invalidFolderID, @@ -154,9 +154,9 @@ func Test_FolderStore_Update(t *testing.T) { }, { "invalid zip file id", - &file.Folder{ + &models.Folder{ ID: folderIDs[folderIdxWithParentFolder], - DirEntry: file.DirEntry{ + DirEntry: models.DirEntry{ ZipFileID: &invalidFileID, }, Path: path, @@ -192,7 +192,7 @@ func Test_FolderStore_Update(t *testing.T) { } } -func makeFolderWithID(index int) *file.Folder { +func makeFolderWithID(index int) *models.Folder { ret := makeFolder(index) ret.ID = folderIDs[index] @@ -207,7 +207,7 @@ func Test_FolderStore_FindByPath(t *testing.T) { tests := []struct { name string path string - want *file.Folder + want *models.Folder wantErr bool }{ { diff --git a/pkg/sqlite/gallery.go b/pkg/sqlite/gallery.go index b7ece948d74..7bdf98bd31a 100644 --- a/pkg/sqlite/gallery.go +++ b/pkg/sqlite/gallery.go @@ -11,7 +11,6 @@ import ( "github.com/doug-martin/goqu/v9" "github.com/doug-martin/goqu/v9/exp" "github.com/jmoiron/sqlx" - "github.com/stashapp/stash/pkg/file" 
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/sliceutil/intslice" "gopkg.in/guregu/null.v4" @@ -163,7 +162,7 @@ func (qb *GalleryStore) selectDataset() *goqu.SelectDataset { ) } -func (qb *GalleryStore) Create(ctx context.Context, newObject *models.Gallery, fileIDs []file.ID) error { +func (qb *GalleryStore) Create(ctx context.Context, newObject *models.Gallery, fileIDs []models.FileID) error { var r galleryRow r.fromGallery(*newObject) @@ -230,7 +229,7 @@ func (qb *GalleryStore) Update(ctx context.Context, updatedObject *models.Galler } if updatedObject.Files.Loaded() { - fileIDs := make([]file.ID, len(updatedObject.Files.List())) + fileIDs := make([]models.FileID, len(updatedObject.Files.List())) for i, f := range updatedObject.Files.List() { fileIDs[i] = f.Base().ID } @@ -287,7 +286,7 @@ func (qb *GalleryStore) Destroy(ctx context.Context, id int) error { return qb.tableMgr.destroyExisting(ctx, []int{id}) } -func (qb *GalleryStore) GetFiles(ctx context.Context, id int) ([]file.File, error) { +func (qb *GalleryStore) GetFiles(ctx context.Context, id int) ([]models.File, error) { fileIDs, err := qb.filesRepository().get(ctx, id) if err != nil { return nil, err @@ -299,13 +298,13 @@ func (qb *GalleryStore) GetFiles(ctx context.Context, id int) ([]file.File, erro return nil, err } - ret := make([]file.File, len(files)) + ret := make([]models.File, len(files)) copy(ret, files) return ret, nil } -func (qb *GalleryStore) GetManyFileIDs(ctx context.Context, ids []int) ([][]file.ID, error) { +func (qb *GalleryStore) GetManyFileIDs(ctx context.Context, ids []int) ([][]models.FileID, error) { const primaryOnly = false return qb.filesRepository().getMany(ctx, ids, primaryOnly) } @@ -412,7 +411,7 @@ func (qb *GalleryStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]* return ret, nil } -func (qb *GalleryStore) FindByFileID(ctx context.Context, fileID file.ID) ([]*models.Gallery, error) { +func (qb *GalleryStore) FindByFileID(ctx 
context.Context, fileID models.FileID) ([]*models.Gallery, error) { sq := dialect.From(galleriesFilesJoinTable).Select(galleriesFilesJoinTable.Col(galleryIDColumn)).Where( galleriesFilesJoinTable.Col(fileIDColumn).Eq(fileID), ) @@ -425,14 +424,14 @@ func (qb *GalleryStore) FindByFileID(ctx context.Context, fileID file.ID) ([]*mo return ret, nil } -func (qb *GalleryStore) CountByFileID(ctx context.Context, fileID file.ID) (int, error) { +func (qb *GalleryStore) CountByFileID(ctx context.Context, fileID models.FileID) (int, error) { joinTable := galleriesFilesJoinTable q := dialect.Select(goqu.COUNT("*")).From(joinTable).Where(joinTable.Col(fileIDColumn).Eq(fileID)) return count(ctx, q) } -func (qb *GalleryStore) FindByFingerprints(ctx context.Context, fp []file.Fingerprint) ([]*models.Gallery, error) { +func (qb *GalleryStore) FindByFingerprints(ctx context.Context, fp []models.Fingerprint) ([]*models.Gallery, error) { fingerprintTable := fingerprintTableMgr.table var ex []exp.Expression @@ -460,20 +459,20 @@ func (qb *GalleryStore) FindByFingerprints(ctx context.Context, fp []file.Finger } func (qb *GalleryStore) FindByChecksum(ctx context.Context, checksum string) ([]*models.Gallery, error) { - return qb.FindByFingerprints(ctx, []file.Fingerprint{ + return qb.FindByFingerprints(ctx, []models.Fingerprint{ { - Type: file.FingerprintTypeMD5, + Type: models.FingerprintTypeMD5, Fingerprint: checksum, }, }) } func (qb *GalleryStore) FindByChecksums(ctx context.Context, checksums []string) ([]*models.Gallery, error) { - fingerprints := make([]file.Fingerprint, len(checksums)) + fingerprints := make([]models.Fingerprint, len(checksums)) for i, c := range checksums { - fingerprints[i] = file.Fingerprint{ - Type: file.FingerprintTypeMD5, + fingerprints[i] = models.Fingerprint{ + Type: models.FingerprintTypeMD5, Fingerprint: c, } } @@ -519,7 +518,7 @@ func (qb *GalleryStore) FindByPath(ctx context.Context, p string) ([]*models.Gal return ret, nil } -func (qb *GalleryStore) 
FindByFolderID(ctx context.Context, folderID file.FolderID) ([]*models.Gallery, error) { +func (qb *GalleryStore) FindByFolderID(ctx context.Context, folderID models.FolderID) ([]*models.Gallery, error) { table := qb.table() sq := dialect.From(table).Select(table.Col(idColumn)).Where( @@ -1118,9 +1117,9 @@ func (qb *GalleryStore) filesRepository() *filesRepository { } } -func (qb *GalleryStore) AddFileID(ctx context.Context, id int, fileID file.ID) error { +func (qb *GalleryStore) AddFileID(ctx context.Context, id int, fileID models.FileID) error { const firstPrimary = false - return galleriesFilesTableMgr.insertJoins(ctx, id, firstPrimary, []file.ID{fileID}) + return galleriesFilesTableMgr.insertJoins(ctx, id, firstPrimary, []models.FileID{fileID}) } func (qb *GalleryStore) performersRepository() *joinRepository { diff --git a/pkg/sqlite/gallery_test.go b/pkg/sqlite/gallery_test.go index d33d5ba2a96..c8dbe02762f 100644 --- a/pkg/sqlite/gallery_test.go +++ b/pkg/sqlite/gallery_test.go @@ -10,7 +10,6 @@ import ( "testing" "time" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stretchr/testify/assert" ) @@ -97,7 +96,7 @@ func Test_galleryQueryBuilder_Create(t *testing.T) { Rating: &rating, Organized: true, StudioID: &studioIDs[studioIdxWithScene], - Files: models.NewRelatedFiles([]file.File{ + Files: models.NewRelatedFiles([]models.File{ galleryFile, }), CreatedAt: createdAt, @@ -145,9 +144,9 @@ func Test_galleryQueryBuilder_Create(t *testing.T) { assert := assert.New(t) s := tt.newObject - var fileIDs []file.ID + var fileIDs []models.FileID if s.Files.Loaded() { - fileIDs = []file.ID{s.Files.List()[0].Base().ID} + fileIDs = []models.FileID{s.Files.List()[0].Base().ID} } if err := qb.Create(ctx, &s, fileIDs); (err != nil) != tt.wantErr { @@ -195,7 +194,7 @@ func Test_galleryQueryBuilder_Create(t *testing.T) { } } -func makeGalleryFileWithID(i int) *file.BaseFile { +func makeGalleryFileWithID(i int) *models.BaseFile { ret := 
makeGalleryFile(i) ret.ID = galleryFileIDs[i] return ret @@ -229,7 +228,7 @@ func Test_galleryQueryBuilder_Update(t *testing.T) { Rating: &rating, Organized: true, StudioID: &studioIDs[studioIdxWithScene], - Files: models.NewRelatedFiles([]file.File{ + Files: models.NewRelatedFiles([]models.File{ makeGalleryFileWithID(galleryIdxWithScene), }), CreatedAt: createdAt, @@ -449,7 +448,7 @@ func Test_galleryQueryBuilder_UpdatePartial(t *testing.T) { Rating: &rating, Organized: true, StudioID: &studioIDs[studioIdxWithGallery], - Files: models.NewRelatedFiles([]file.File{ + Files: models.NewRelatedFiles([]models.File{ makeGalleryFile(galleryIdxWithImage), }), CreatedAt: createdAt, @@ -466,7 +465,7 @@ func Test_galleryQueryBuilder_UpdatePartial(t *testing.T) { clearGalleryPartial(), models.Gallery{ ID: galleryIDs[galleryIdxWithImage], - Files: models.NewRelatedFiles([]file.File{ + Files: models.NewRelatedFiles([]models.File{ makeGalleryFile(galleryIdxWithImage), }), SceneIDs: models.NewRelatedIDs([]int{}), @@ -844,7 +843,7 @@ func makeGalleryWithID(index int) *models.Gallery { ret := makeGallery(index, includeScenes) ret.ID = galleryIDs[index] - ret.Files = models.NewRelatedFiles([]file.File{makeGalleryFile(index)}) + ret.Files = models.NewRelatedFiles([]models.File{makeGalleryFile(index)}) return ret } @@ -1281,7 +1280,7 @@ func galleriesToIDs(i []*models.Gallery) []int { func Test_galleryStore_FindByFileID(t *testing.T) { tests := []struct { name string - fileID file.ID + fileID models.FileID include []int exclude []int }{ @@ -1330,7 +1329,7 @@ func Test_galleryStore_FindByFileID(t *testing.T) { func Test_galleryStore_FindByFolderID(t *testing.T) { tests := []struct { name string - folderID file.FolderID + folderID models.FolderID include []int exclude []int }{ diff --git a/pkg/sqlite/image.go b/pkg/sqlite/image.go index 20e7801d8bc..0ee12f0d947 100644 --- a/pkg/sqlite/image.go +++ b/pkg/sqlite/image.go @@ -8,7 +8,6 @@ import ( "path/filepath" "github.com/jmoiron/sqlx" - 
"github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/sliceutil/intslice" "gopkg.in/guregu/null.v4" @@ -25,27 +24,27 @@ const ( performersImagesTable = "performers_images" imagesTagsTable = "images_tags" imagesFilesTable = "images_files" + imagesURLsTable = "image_urls" + imageURLColumn = "url" ) type imageRow struct { ID int `db:"id" goqu:"skipinsert"` Title zero.String `db:"title"` // expressed as 1-100 - Rating null.Int `db:"rating"` - URL zero.String `db:"url"` - Date NullDate `db:"date"` - Organized bool `db:"organized"` - OCounter int `db:"o_counter"` - StudioID null.Int `db:"studio_id,omitempty"` - CreatedAt Timestamp `db:"created_at"` - UpdatedAt Timestamp `db:"updated_at"` + Rating null.Int `db:"rating"` + Date NullDate `db:"date"` + Organized bool `db:"organized"` + OCounter int `db:"o_counter"` + StudioID null.Int `db:"studio_id,omitempty"` + CreatedAt Timestamp `db:"created_at"` + UpdatedAt Timestamp `db:"updated_at"` } func (r *imageRow) fromImage(i models.Image) { r.ID = i.ID r.Title = zero.StringFrom(i.Title) r.Rating = intFromPtr(i.Rating) - r.URL = zero.StringFrom(i.URL) r.Date = NullDateFromDatePtr(i.Date) r.Organized = i.Organized r.OCounter = i.OCounter @@ -67,7 +66,6 @@ func (r *imageQueryRow) resolve() *models.Image { ID: r.ID, Title: r.Title.String, Rating: nullIntPtr(r.Rating), - URL: r.URL.String, Date: r.Date.DatePtr(), Organized: r.Organized, OCounter: r.OCounter, @@ -94,7 +92,6 @@ type imageRowRecord struct { func (r *imageRowRecord) fromPartial(i models.ImagePartial) { r.setNullString("title", i.Title) r.setNullInt("rating", i.Rating) - r.setNullString("url", i.URL) r.setNullDate("date", i.Date) r.setBool("organized", i.Organized) r.setInt("o_counter", i.OCounter) @@ -150,7 +147,7 @@ func (qb *ImageStore) selectDataset() *goqu.SelectDataset { checksum, goqu.On( checksum.Col(fileIDColumn).Eq(imagesFilesJoinTable.Col(fileIDColumn)), - checksum.Col("type").Eq(file.FingerprintTypeMD5), + 
checksum.Col("type").Eq(models.FingerprintTypeMD5), ), ).Select( qb.table().All(), @@ -161,18 +158,25 @@ func (qb *ImageStore) selectDataset() *goqu.SelectDataset { ) } -func (qb *ImageStore) Create(ctx context.Context, newObject *models.ImageCreateInput) error { +func (qb *ImageStore) Create(ctx context.Context, newObject *models.Image, fileIDs []models.FileID) error { var r imageRow - r.fromImage(*newObject.Image) + r.fromImage(*newObject) id, err := qb.tableMgr.insertID(ctx, r) if err != nil { return err } - if len(newObject.FileIDs) > 0 { + if len(fileIDs) > 0 { const firstPrimary = true - if err := imagesFilesTableMgr.insertJoins(ctx, id, firstPrimary, newObject.FileIDs); err != nil { + if err := imagesFilesTableMgr.insertJoins(ctx, id, firstPrimary, fileIDs); err != nil { + return err + } + } + + if newObject.URLs.Loaded() { + const startPos = 0 + if err := imagesURLsTableMgr.insertJoins(ctx, id, startPos, newObject.URLs.List()); err != nil { return err } } @@ -199,7 +203,7 @@ func (qb *ImageStore) Create(ctx context.Context, newObject *models.ImageCreateI return fmt.Errorf("finding after create: %w", err) } - *newObject.Image = *updated + *newObject = *updated return nil } @@ -224,6 +228,12 @@ func (qb *ImageStore) UpdatePartial(ctx context.Context, id int, partial models. 
return nil, err } } + + if partial.URLs != nil { + if err := imagesURLsTableMgr.modifyJoins(ctx, id, partial.URLs.Values, partial.URLs.Mode); err != nil { + return nil, err + } + } if partial.PerformerIDs != nil { if err := imagesPerformersTableMgr.modifyJoins(ctx, id, partial.PerformerIDs.IDs, partial.PerformerIDs.Mode); err != nil { return nil, err @@ -252,6 +262,12 @@ func (qb *ImageStore) Update(ctx context.Context, updatedObject *models.Image) e return err } + if updatedObject.URLs.Loaded() { + if err := imagesURLsTableMgr.replaceJoins(ctx, updatedObject.ID, updatedObject.URLs.List()); err != nil { + return err + } + } + if updatedObject.PerformerIDs.Loaded() { if err := imagesPerformersTableMgr.replaceJoins(ctx, updatedObject.ID, updatedObject.PerformerIDs.List()); err != nil { return err @@ -271,7 +287,7 @@ func (qb *ImageStore) Update(ctx context.Context, updatedObject *models.Image) e } if updatedObject.Files.Loaded() { - fileIDs := make([]file.ID, len(updatedObject.Files.List())) + fileIDs := make([]models.FileID, len(updatedObject.Files.List())) for i, f := range updatedObject.Files.List() { fileIDs[i] = f.Base().ID } @@ -389,7 +405,7 @@ func (qb *ImageStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]*mo return ret, nil } -func (qb *ImageStore) GetFiles(ctx context.Context, id int) ([]file.File, error) { +func (qb *ImageStore) GetFiles(ctx context.Context, id int) ([]models.File, error) { fileIDs, err := qb.filesRepository().get(ctx, id) if err != nil { return nil, err @@ -404,12 +420,12 @@ func (qb *ImageStore) GetFiles(ctx context.Context, id int) ([]file.File, error) return files, nil } -func (qb *ImageStore) GetManyFileIDs(ctx context.Context, ids []int) ([][]file.ID, error) { +func (qb *ImageStore) GetManyFileIDs(ctx context.Context, ids []int) ([][]models.FileID, error) { const primaryOnly = false return qb.filesRepository().getMany(ctx, ids, primaryOnly) } -func (qb *ImageStore) FindByFileID(ctx context.Context, fileID file.ID) 
([]*models.Image, error) { +func (qb *ImageStore) FindByFileID(ctx context.Context, fileID models.FileID) ([]*models.Image, error) { table := qb.table() sq := dialect.From(table). @@ -427,14 +443,14 @@ func (qb *ImageStore) FindByFileID(ctx context.Context, fileID file.ID) ([]*mode return ret, nil } -func (qb *ImageStore) CountByFileID(ctx context.Context, fileID file.ID) (int, error) { +func (qb *ImageStore) CountByFileID(ctx context.Context, fileID models.FileID) (int, error) { joinTable := imagesFilesJoinTable q := dialect.Select(goqu.COUNT("*")).From(joinTable).Where(joinTable.Col(fileIDColumn).Eq(fileID)) return count(ctx, q) } -func (qb *ImageStore) FindByFingerprints(ctx context.Context, fp []file.Fingerprint) ([]*models.Image, error) { +func (qb *ImageStore) FindByFingerprints(ctx context.Context, fp []models.Fingerprint) ([]*models.Image, error) { table := qb.table() fingerprintTable := fingerprintTableMgr.table @@ -467,9 +483,9 @@ func (qb *ImageStore) FindByFingerprints(ctx context.Context, fp []file.Fingerpr } func (qb *ImageStore) FindByChecksum(ctx context.Context, checksum string) ([]*models.Image, error) { - return qb.FindByFingerprints(ctx, []file.Fingerprint{ + return qb.FindByFingerprints(ctx, []models.Fingerprint{ { - Type: file.FingerprintTypeMD5, + Type: models.FingerprintTypeMD5, Fingerprint: checksum, }, }) @@ -523,7 +539,7 @@ func (qb *ImageStore) OCountByPerformerID(ctx context.Context, performerID int) return ret, nil } -func (qb *ImageStore) FindByFolderID(ctx context.Context, folderID file.FolderID) ([]*models.Image, error) { +func (qb *ImageStore) FindByFolderID(ctx context.Context, folderID models.FolderID) ([]*models.Image, error) { table := qb.table() fileTable := goqu.T(fileTable) @@ -548,7 +564,7 @@ func (qb *ImageStore) FindByFolderID(ctx context.Context, folderID file.FolderID return ret, nil } -func (qb *ImageStore) FindByZipFileID(ctx context.Context, zipFileID file.ID) ([]*models.Image, error) { +func (qb *ImageStore) 
FindByZipFileID(ctx context.Context, zipFileID models.FileID) ([]*models.Image, error) { table := qb.table() fileTable := goqu.T(fileTable) @@ -665,7 +681,7 @@ func (qb *ImageStore) makeFilter(ctx context.Context, imageFilter *models.ImageF query.handleCriterion(ctx, intCriterionHandler(imageFilter.OCounter, "images.o_counter", nil)) query.handleCriterion(ctx, boolCriterionHandler(imageFilter.Organized, "images.organized", nil)) query.handleCriterion(ctx, dateCriterionHandler(imageFilter.Date, "images.date")) - query.handleCriterion(ctx, stringCriterionHandler(imageFilter.URL, "images.url")) + query.handleCriterion(ctx, imageURLsCriterionHandler(imageFilter.URL)) query.handleCriterion(ctx, resolutionCriterionHandler(imageFilter.Resolution, "image_files.height", "image_files.width", qb.addImageFilesTable)) query.handleCriterion(ctx, imageIsMissingCriterionHandler(qb, imageFilter.IsMissing)) @@ -856,6 +872,18 @@ func imageIsMissingCriterionHandler(qb *ImageStore, isMissing *string) criterion } } +func imageURLsCriterionHandler(url *models.StringCriterionInput) criterionHandlerFunc { + h := stringListCriterionHandlerBuilder{ + joinTable: imagesURLsTable, + stringColumn: imageURLColumn, + addJoinTable: func(f *filterBuilder) { + imagesURLsTableMgr.join(f, "", "images.id") + }, + } + + return h.handler(url) +} + func (qb *ImageStore) getMultiCriterionHandlerBuilder(foreignTable, joinTable, foreignFK string, addJoinsFunc func(f *filterBuilder)) multiCriterionHandlerBuilder { return multiCriterionHandlerBuilder{ primaryTable: imageTable, @@ -1043,9 +1071,9 @@ func (qb *ImageStore) filesRepository() *filesRepository { } } -func (qb *ImageStore) AddFileID(ctx context.Context, id int, fileID file.ID) error { +func (qb *ImageStore) AddFileID(ctx context.Context, id int, fileID models.FileID) error { const firstPrimary = false - return imagesFilesTableMgr.insertJoins(ctx, id, firstPrimary, []file.ID{fileID}) + return imagesFilesTableMgr.insertJoins(ctx, id, firstPrimary, 
[]models.FileID{fileID}) } func (qb *ImageStore) GetGalleryIDs(ctx context.Context, imageID int) ([]int, error) { @@ -1098,3 +1126,7 @@ func (qb *ImageStore) UpdateTags(ctx context.Context, imageID int, tagIDs []int) // Delete the existing joins and then create new ones return qb.tagsRepository().replace(ctx, imageID, tagIDs) } + +func (qb *ImageStore) GetURLs(ctx context.Context, imageID int) ([]string, error) { + return imagesURLsTableMgr.get(ctx, imageID) +} diff --git a/pkg/sqlite/image_test.go b/pkg/sqlite/image_test.go index 4f3ebcc22ce..7735cb5ec43 100644 --- a/pkg/sqlite/image_test.go +++ b/pkg/sqlite/image_test.go @@ -10,12 +10,16 @@ import ( "testing" "time" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stretchr/testify/assert" ) func loadImageRelationships(ctx context.Context, expected models.Image, actual *models.Image) error { + if expected.URLs.Loaded() { + if err := actual.LoadURLs(ctx, db.Image); err != nil { + return err + } + } if expected.GalleryIDs.Loaded() { if err := actual.LoadGalleryIDs(ctx, db.Image); err != nil { return err @@ -75,7 +79,7 @@ func Test_imageQueryBuilder_Create(t *testing.T) { Title: title, Rating: &rating, Date: &date, - URL: url, + URLs: models.NewRelatedStrings([]string{url}), Organized: true, OCounter: ocounter, StudioID: &studioIDs[studioIdxWithImage], @@ -93,12 +97,12 @@ func Test_imageQueryBuilder_Create(t *testing.T) { Title: title, Rating: &rating, Date: &date, - URL: url, + URLs: models.NewRelatedStrings([]string{url}), Organized: true, OCounter: ocounter, StudioID: &studioIDs[studioIdxWithImage], - Files: models.NewRelatedFiles([]file.File{ - imageFile.(*file.ImageFile), + Files: models.NewRelatedFiles([]models.File{ + imageFile.(*models.ImageFile), }), PrimaryFileID: &imageFile.Base().ID, Path: imageFile.Base().Path, @@ -146,17 +150,14 @@ func Test_imageQueryBuilder_Create(t *testing.T) { runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { assert := 
assert.New(t) - var fileIDs []file.ID + var fileIDs []models.FileID if tt.newObject.Files.Loaded() { for _, f := range tt.newObject.Files.List() { fileIDs = append(fileIDs, f.Base().ID) } } s := tt.newObject - if err := qb.Create(ctx, &models.ImageCreateInput{ - Image: &s, - FileIDs: fileIDs, - }); (err != nil) != tt.wantErr { + if err := qb.Create(ctx, &s, fileIDs); (err != nil) != tt.wantErr { t.Errorf("imageQueryBuilder.Create() error = %v, wantErr = %v", err, tt.wantErr) } @@ -205,7 +206,7 @@ func clearImageFileIDs(image *models.Image) { } } -func makeImageFileWithID(i int) *file.ImageFile { +func makeImageFileWithID(i int) *models.ImageFile { ret := makeImageFile(i) ret.ID = imageFileIDs[i] return ret @@ -233,7 +234,7 @@ func Test_imageQueryBuilder_Update(t *testing.T) { ID: imageIDs[imageIdxWithGallery], Title: title, Rating: &rating, - URL: url, + URLs: models.NewRelatedStrings([]string{url}), Date: &date, Organized: true, OCounter: ocounter, @@ -382,7 +383,7 @@ func clearImagePartial() models.ImagePartial { return models.ImagePartial{ Title: models.OptionalString{Set: true, Null: true}, Rating: models.OptionalInt{Set: true, Null: true}, - URL: models.OptionalString{Set: true, Null: true}, + URLs: &models.UpdateStrings{Mode: models.RelationshipUpdateModeSet}, Date: models.OptionalDate{Set: true, Null: true}, StudioID: models.OptionalInt{Set: true, Null: true}, GalleryIDs: &models.UpdateIDs{Mode: models.RelationshipUpdateModeSet}, @@ -413,9 +414,12 @@ func Test_imageQueryBuilder_UpdatePartial(t *testing.T) { "full", imageIDs[imageIdx1WithGallery], models.ImagePartial{ - Title: models.NewOptionalString(title), - Rating: models.NewOptionalInt(rating), - URL: models.NewOptionalString(url), + Title: models.NewOptionalString(title), + Rating: models.NewOptionalInt(rating), + URLs: &models.UpdateStrings{ + Values: []string{url}, + Mode: models.RelationshipUpdateModeSet, + }, Date: models.NewOptionalDate(date), Organized: models.NewOptionalBool(true), OCounter: 
models.NewOptionalInt(ocounter), @@ -439,12 +443,12 @@ func Test_imageQueryBuilder_UpdatePartial(t *testing.T) { ID: imageIDs[imageIdx1WithGallery], Title: title, Rating: &rating, - URL: url, + URLs: models.NewRelatedStrings([]string{url}), Date: &date, Organized: true, OCounter: ocounter, StudioID: &studioIDs[studioIdxWithImage], - Files: models.NewRelatedFiles([]file.File{ + Files: models.NewRelatedFiles([]models.File{ makeImageFile(imageIdx1WithGallery), }), CreatedAt: createdAt, @@ -462,7 +466,7 @@ func Test_imageQueryBuilder_UpdatePartial(t *testing.T) { models.Image{ ID: imageIDs[imageIdx1WithGallery], OCounter: getOCounter(imageIdx1WithGallery), - Files: models.NewRelatedFiles([]file.File{ + Files: models.NewRelatedFiles([]models.File{ makeImageFile(imageIdx1WithGallery), }), GalleryIDs: models.NewRelatedIDs([]int{}), @@ -965,7 +969,7 @@ func makeImageWithID(index int) *models.Image { ret := makeImage(index) ret.ID = imageIDs[index] - ret.Files = models.NewRelatedFiles([]file.File{makeImageFile(index)}) + ret.Files = models.NewRelatedFiles([]models.File{makeImageFile(index)}) return ret } @@ -1153,15 +1157,15 @@ func Test_imageQueryBuilder_FindByFingerprints(t *testing.T) { tests := []struct { name string - fingerprints []file.Fingerprint + fingerprints []models.Fingerprint want []*models.Image wantErr bool }{ { "valid", - []file.Fingerprint{ + []models.Fingerprint{ { - Type: file.FingerprintTypeMD5, + Type: models.FingerprintTypeMD5, Fingerprint: getChecksum(imageIdxWithGallery), }, }, @@ -1170,9 +1174,9 @@ func Test_imageQueryBuilder_FindByFingerprints(t *testing.T) { }, { "invalid", - []file.Fingerprint{ + []models.Fingerprint{ { - Type: file.FingerprintTypeMD5, + Type: models.FingerprintTypeMD5, Fingerprint: "invalid checksum", }, }, @@ -1181,9 +1185,9 @@ func Test_imageQueryBuilder_FindByFingerprints(t *testing.T) { }, { "with performers", - []file.Fingerprint{ + []models.Fingerprint{ { - Type: file.FingerprintTypeMD5, + Type: models.FingerprintTypeMD5, 
Fingerprint: getChecksum(imageIdxWithTwoPerformers), }, }, @@ -1192,9 +1196,9 @@ func Test_imageQueryBuilder_FindByFingerprints(t *testing.T) { }, { "with tags", - []file.Fingerprint{ + []models.Fingerprint{ { - Type: file.FingerprintTypeMD5, + Type: models.FingerprintTypeMD5, Fingerprint: getChecksum(imageIdxWithTwoTags), }, }, @@ -1316,7 +1320,7 @@ func imagesToIDs(i []*models.Image) []int { func Test_imageStore_FindByFileID(t *testing.T) { tests := []struct { name string - fileID file.ID + fileID models.FileID include []int exclude []int }{ @@ -1365,7 +1369,7 @@ func Test_imageStore_FindByFileID(t *testing.T) { func Test_imageStore_FindByFolderID(t *testing.T) { tests := []struct { name string - folderID file.FolderID + folderID models.FolderID include []int exclude []int }{ @@ -1420,7 +1424,7 @@ func Test_imageStore_FindByFolderID(t *testing.T) { func Test_imageStore_FindByZipFileID(t *testing.T) { tests := []struct { name string - zipFileID file.ID + zipFileID models.FileID include []int exclude []int }{ @@ -1523,6 +1527,67 @@ func imageQueryQ(ctx context.Context, t *testing.T, sqb models.ImageReader, q st assert.Len(t, images, totalImages) } +func verifyImageQuery(t *testing.T, filter models.ImageFilterType, verifyFn func(ctx context.Context, s *models.Image)) { + t.Helper() + withTxn(func(ctx context.Context) error { + t.Helper() + sqb := db.Image + + images := queryImages(ctx, t, sqb, &filter, nil) + + // assume it should find at least one + assert.Greater(t, len(images), 0) + + for _, image := range images { + verifyFn(ctx, image) + } + + return nil + }) +} + +func TestImageQueryURL(t *testing.T) { + const imageIdx = 1 + imageURL := getImageStringValue(imageIdx, urlField) + urlCriterion := models.StringCriterionInput{ + Value: imageURL, + Modifier: models.CriterionModifierEquals, + } + filter := models.ImageFilterType{ + URL: &urlCriterion, + } + + verifyFn := func(ctx context.Context, o *models.Image) { + t.Helper() + + if err := o.LoadURLs(ctx, 
db.Image); err != nil { + t.Errorf("Error loading image URLs: %v", err) + } + + urls := o.URLs.List() + var url string + if len(urls) > 0 { + url = urls[0] + } + + verifyString(t, url, urlCriterion) + } + + verifyImageQuery(t, filter, verifyFn) + urlCriterion.Modifier = models.CriterionModifierNotEquals + verifyImageQuery(t, filter, verifyFn) + urlCriterion.Modifier = models.CriterionModifierMatchesRegex + urlCriterion.Value = "image_.*1_URL" + verifyImageQuery(t, filter, verifyFn) + urlCriterion.Modifier = models.CriterionModifierNotMatchesRegex + verifyImageQuery(t, filter, verifyFn) + urlCriterion.Modifier = models.CriterionModifierIsNull + urlCriterion.Value = "" + verifyImageQuery(t, filter, verifyFn) + urlCriterion.Modifier = models.CriterionModifierNotNull + verifyImageQuery(t, filter, verifyFn) +} + func TestImageQueryPath(t *testing.T) { const imageIdx = 1 imagePath := getFilePath(folderIdxWithImageFiles, getImageBasename(imageIdx)) @@ -1868,11 +1933,12 @@ func verifyImagesResolution(t *testing.T, resolution models.ResolutionEnum) { t.Errorf("Error loading primary file: %s", err.Error()) return nil } - asFrame, ok := image.Files.Primary().(file.VisualFile) + f := image.Files.Primary() + vf, ok := f.(models.VisualFile) if !ok { - t.Errorf("Error: Associated primary file of image is not of type VisualFile") + t.Errorf("Error: image primary file is not a visual file (is type %T)", f) } - verifyImageResolution(t, asFrame.GetHeight(), resolution) + verifyImageResolution(t, vf.GetHeight(), resolution) } return nil
_, err := m.db.Exec("UPDATE studios SET name = ? WHERE id = ?", newName, id) if err != nil { return err diff --git a/pkg/sqlite/migrations/49_postmigrate.go b/pkg/sqlite/migrations/49_postmigrate.go new file mode 100644 index 00000000000..941cf6a8802 --- /dev/null +++ b/pkg/sqlite/migrations/49_postmigrate.go @@ -0,0 +1,419 @@ +package migrations + +import ( + "context" + "encoding/json" + "fmt" + "reflect" + "strconv" + "strings" + + "github.com/jmoiron/sqlx" + "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/sqlite" +) + +var migrate49TypeResolution = map[string][]string{ + "Boolean": { + /* + "organized", + "interactive", + "ignore_auto_tag", + "performer_favorite", + "filter_favorites", + */ + }, + "Int": { + "id", + "rating", + "rating100", + "o_counter", + "duration", + "tag_count", + "age", + "height", + "height_cm", + "weight", + "scene_count", + "marker_count", + "image_count", + "gallery_count", + "performer_count", + "interactive_speed", + "resume_time", + "play_count", + "play_duration", + "parent_count", + "child_count", + "performer_age", + "file_count", + }, + "Float": { + "penis_length", + }, + "Object": { + "tags", + "performers", + "studios", + "movies", + "galleries", + "parents", + "children", + "scene_tags", + "performer_tags", + }, +} +var migrate49NameChanges = map[string]string{ + "rating": "rating100", + "parent_studios": "parents", + "child_studios": "children", + "parent_tags": "parents", + "child_tags": "children", + "child_tag_count": "child_count", + "parent_tag_count": "parent_count", + "height": "height_cm", + "imageIsMissing": "is_missing", + "sceneIsMissing": "is_missing", + "galleryIsMissing": "is_missing", + "performerIsMissing": "is_missing", + "tagIsMissing": "is_missing", + "studioIsMissing": "is_missing", + "movieIsMissing": "is_missing", + "favorite": "filter_favorites", + "hasMarkers": "has_markers", + "parentTags": "parents", + "childTags": "children", + 
"phash": "phash_distance", + "scene_code": "code", + "hasChapters": "has_chapters", + "sceneChecksum": "checksum", + "galleryChecksum": "checksum", + "sceneTags": "scene_tags", + "performerTags": "performer_tags", +} + +func post49(ctx context.Context, db *sqlx.DB) error { + logger.Info("Running post-migration for schema version 49") + + m := schema49Migrator{ + migrator: migrator{ + db: db, + }, + } + + return m.migrateSavedFilters(ctx) +} + +type schema49Migrator struct { + migrator +} + +func (m *schema49Migrator) migrateSavedFilters(ctx context.Context) error { + if err := m.withTxn(ctx, func(tx *sqlx.Tx) error { + rows, err := m.db.Query("SELECT id, mode, find_filter FROM saved_filters ORDER BY id") + if err != nil { + return err + } + defer rows.Close() + + for rows.Next() { + var ( + id int + mode models.FilterMode + findFilter string + ) + + err := rows.Scan(&id, &mode, &findFilter) + if err != nil { + return err + } + + asRawMessage := json.RawMessage(findFilter) + + newFindFilter, err := m.getFindFilter(asRawMessage) + if err != nil { + return fmt.Errorf("failed to get find filter for saved filter %d: %w", id, err) + } + + objectFilter, err := m.getObjectFilter(mode, asRawMessage) + if err != nil { + return fmt.Errorf("failed to get object filter for saved filter %d: %w", id, err) + } + + uiOptions, err := m.getDisplayOptions(asRawMessage) + if err != nil { + return fmt.Errorf("failed to get display options for saved filter %d: %w", id, err) + } + + _, err = m.db.Exec("UPDATE saved_filters SET find_filter = ?, object_filter = ?, ui_options = ? 
WHERE id = ?", newFindFilter, objectFilter, uiOptions, id) + if err != nil { + return fmt.Errorf("failed to update saved filter %d: %w", id, err) + } + } + + return rows.Err() + }); err != nil { + return err + } + + return nil +} + +func (m *schema49Migrator) getDisplayOptions(data json.RawMessage) (json.RawMessage, error) { + type displayOptions struct { + DisplayMode *int `json:"disp"` + ZoomIndex *int `json:"z"` + } + + var opts displayOptions + if err := json.Unmarshal(data, &opts); err != nil { + return nil, fmt.Errorf("failed to unmarshal display options: %w", err) + } + + ret := make(map[string]interface{}) + if opts.DisplayMode != nil { + ret["display_mode"] = *opts.DisplayMode + } + if opts.ZoomIndex != nil { + ret["zoom_index"] = *opts.ZoomIndex + } + + return json.Marshal(ret) +} + +func (m *schema49Migrator) getFindFilter(data json.RawMessage) (json.RawMessage, error) { + type findFilterJson struct { + Q *string `json:"q"` + Page *int `json:"page"` + PerPage *int `json:"perPage"` + Sort *string `json:"sortby"` + Direction *string `json:"sortdir"` + } + + ppDefault := 40 + pageDefault := 1 + qDefault := "" + sortDefault := "date" + asc := "asc" + ff := findFilterJson{Q: &qDefault, Page: &pageDefault, PerPage: &ppDefault, Sort: &sortDefault, Direction: &asc} + if err := json.Unmarshal(data, &ff); err != nil { + return nil, fmt.Errorf("failed to unmarshal find filter: %w", err) + } + + newDir := strings.ToUpper(*ff.Direction) + ff.Direction = &newDir + + type findFilterRewrite struct { + Q *string `json:"q"` + Page *int `json:"page"` + PerPage *int `json:"per_page"` + Sort *string `json:"sort"` + Direction *string `json:"direction"` + } + + fr := findFilterRewrite(ff) + + return json.Marshal(fr) +} + +func (m *schema49Migrator) getObjectFilter(mode models.FilterMode, data json.RawMessage) (json.RawMessage, error) { + type criteriaJson struct { + Criteria []string `json:"c"` + } + + var c criteriaJson + if err := json.Unmarshal(data, &c); err != nil { + 
return nil, fmt.Errorf("failed to unmarshal object filter: %w", err) + } + + ret := make(map[string]interface{}) + for _, raw := range c.Criteria { + if err := m.convertCriterion(mode, ret, raw); err != nil { + return nil, err + } + } + + return json.Marshal(ret) +} + +func (m *schema49Migrator) convertCriterion(mode models.FilterMode, out map[string]interface{}, criterion string) error { + // convert to a map + ret := make(map[string]interface{}) + + if err := json.Unmarshal([]byte(criterion), &ret); err != nil { + return fmt.Errorf("failed to unmarshal criterion: %w", err) + } + + field := ret["type"].(string) + // Some names are deprecated + if newFieldName, ok := migrate49NameChanges[field]; ok { + field = newFieldName + } + delete(ret, "type") + + // Find out whether the object needs some adjustment/has non-string content attached + // Only adjust if value is present + if v, ok := ret["value"]; ok && v != nil { + var err error + switch { + case arrayContains(migrate49TypeResolution["Boolean"], field): + ret["value"], err = m.adjustCriterionValue(ret["value"], "bool") + case arrayContains(migrate49TypeResolution["Int"], field): + ret["value"], err = m.adjustCriterionValue(ret["value"], "int") + case arrayContains(migrate49TypeResolution["Float"], field): + ret["value"], err = m.adjustCriterionValue(ret["value"], "float64") + case arrayContains(migrate49TypeResolution["Object"], field): + ret["value"], err = m.adjustCriterionValue(ret["value"], "object") + } + + if err != nil { + return fmt.Errorf("failed to adjust criterion value for %q: %w", field, err) + } + } + + out[field] = ret + + return nil +} + +func arrayContains(sl []string, name string) bool { + for _, value := range sl { + if value == name { + return true + } + } + return false +} + +// General Function for converting the types inside a criterion +func (m *schema49Migrator) adjustCriterionValue(value interface{}, typ string) (interface{}, error) { + if mapvalue, ok := value.(map[string]interface{}); 
ok { + // Primitive values and lists of them + var err error + for _, next := range []string{"value", "value2"} { + if valmap, ok := mapvalue[next].([]string); ok { + valNewMap := make([]interface{}, len(valmap)) + for index, v := range valmap { + valNewMap[index], err = m.convertValue(v, typ) + if err != nil { + return nil, err + } + } + mapvalue[next] = valNewMap + } else if _, ok := mapvalue[next]; ok { + mapvalue[next], err = m.convertValue(mapvalue[next], typ) + if err != nil { + return nil, err + } + } + } + // Items + for _, next := range []string{"items", "excluded"} { + if _, ok := mapvalue[next]; ok { + mapvalue[next], err = m.adjustCriterionItem(mapvalue[next]) + if err != nil { + return nil, err + } + } + } + + // Those Values are always Int + for _, next := range []string{"Distance", "Depth"} { + if _, ok := mapvalue[next]; ok { + mapvalue[next], err = strconv.ParseInt(mapvalue[next].(string), 10, 64) + if err != nil { + return nil, err + } + } + } + return mapvalue, nil + } else if _, ok := value.(string); ok { + // Singular Primitive Values + return m.convertValue(value, typ) + } else if listvalue, ok := value.([]interface{}); ok { + // Items as a singular value, as well as singular lists + var err error + if typ == "object" { + value, err = m.adjustCriterionItem(value) + if err != nil { + return nil, err + } + } else { + for index, val := range listvalue { + listvalue[index], err = m.convertValue(val, typ) + if err != nil { + return nil, err + } + } + value = listvalue + } + + return value, nil + } else if _, ok := value.(int); ok { + return value, nil + } else if _, ok := value.(float64); ok { + return value, nil + } + + return nil, fmt.Errorf("could not recognize format of value %v", value) +} + +// Converts values inside a criterion that represent some objects, like performer or studio.
+func (m *schema49Migrator) adjustCriterionItem(value interface{}) (interface{}, error) { + // Basically, this first converts step by step the value, after that it adjusts id and Depth (of parent/child studios) to int + if itemlist, ok := value.([]interface{}); ok { + var itemNewList []interface{} + for _, val := range itemlist { + if val, ok := val.(map[string]interface{}); ok { + newItem := make(map[string]interface{}) + for index, v := range val { + if v, ok := v.(string); ok { + switch index { + case "id": + if formattedOut, ok := strconv.ParseInt(v, 10, 64); ok == nil { + newItem["id"] = formattedOut + } + case "Depth": + if formattedOut, ok := strconv.ParseInt(v, 10, 64); ok == nil { + newItem["Depth"] = formattedOut + } + default: + newItem[index] = v + } + } + } + itemNewList = append(itemNewList, newItem) + } + } + return itemNewList, nil + } + return nil, fmt.Errorf("could not recognize %v as an item list", value) +} + +// Converts a value of type string to its according type, given by string +func (m *schema49Migrator) convertValue(value interface{}, typ string) (interface{}, error) { + valueType := reflect.TypeOf(value).Name() + if typ == valueType || (typ == "int" && valueType == "float64") || (typ == "float64" && valueType == "int") || value == "" { + return value, nil + } + + if val, ok := value.(string); ok { + switch typ { + case "float64": + return strconv.ParseFloat(val, 64) + case "int": + return strconv.ParseInt(val, 10, 64) + case "bool": + return strconv.ParseBool(val) + default: + return nil, fmt.Errorf("no valid conversion type for %v, need bool, int or float64", typ) + } + } + + return nil, fmt.Errorf("cannot convert %v (%T) to %s", value, value, typ) +} + +func init() { + sqlite.RegisterPostMigration(49, post49) +} diff --git a/pkg/sqlite/migrations/49_saved_filter_refactor.up.sql b/pkg/sqlite/migrations/49_saved_filter_refactor.up.sql new file mode 100644 index 00000000000..c769a9e4b8e --- /dev/null +++ 
b/pkg/sqlite/migrations/49_saved_filter_refactor.up.sql @@ -0,0 +1,34 @@ +PRAGMA foreign_keys=OFF; + +-- remove filter column +CREATE TABLE `saved_filters_new` ( + `id` integer not null primary key autoincrement, + `name` varchar(510) not null, + `mode` varchar(255) not null, + `find_filter` blob, + `object_filter` blob, + `ui_options` blob +); + +-- move filter data into find_filter to be migrated in the post-migration +INSERT INTO `saved_filters_new` + ( + `id`, + `name`, + `mode`, + `find_filter` + ) + SELECT + `id`, + `name`, + `mode`, + `filter` + FROM `saved_filters`; + +DROP INDEX `index_saved_filters_on_mode_name_unique`; +DROP TABLE `saved_filters`; +ALTER TABLE `saved_filters_new` rename to `saved_filters`; + +CREATE UNIQUE INDEX `index_saved_filters_on_mode_name_unique` on `saved_filters` (`mode`, `name`); + +PRAGMA foreign_keys=ON; diff --git a/pkg/sqlite/migrations/50_image_urls.up.sql b/pkg/sqlite/migrations/50_image_urls.up.sql new file mode 100644 index 00000000000..47ff373075b --- /dev/null +++ b/pkg/sqlite/migrations/50_image_urls.up.sql @@ -0,0 +1,70 @@ +PRAGMA foreign_keys=OFF; + +CREATE TABLE `image_urls` ( + `image_id` integer NOT NULL, + `position` integer NOT NULL, + `url` varchar(255) NOT NULL, + foreign key(`image_id`) references `images`(`id`) on delete CASCADE, + PRIMARY KEY(`image_id`, `position`, `url`) +); + +CREATE INDEX `image_urls_url` on `image_urls` (`url`); + +-- drop url +CREATE TABLE "images_new" ( + `id` integer not null primary key autoincrement, + `title` varchar(255), + `rating` tinyint, + `studio_id` integer, + `o_counter` tinyint not null default 0, + `organized` boolean not null default '0', + `created_at` datetime not null, + `updated_at` datetime not null, + `date` date, + foreign key(`studio_id`) references `studios`(`id`) on delete SET NULL +); + +INSERT INTO `images_new` + ( + `id`, + `title`, + `rating`, + `studio_id`, + `o_counter`, + `organized`, + `created_at`, + `updated_at`, + `date` + ) + SELECT + `id`, + 
`title`, + `rating`, + `studio_id`, + `o_counter`, + `organized`, + `created_at`, + `updated_at`, + `date` + FROM `images`; + +INSERT INTO `image_urls` + ( + `image_id`, + `position`, + `url` + ) + SELECT + `id`, + '0', + `url` + FROM `images` + WHERE `images`.`url` IS NOT NULL AND `images`.`url` != ''; + +DROP INDEX `index_images_on_studio_id`; +DROP TABLE `images`; +ALTER TABLE `images_new` rename to `images`; + +CREATE INDEX `index_images_on_studio_id` on `images` (`studio_id`); + +PRAGMA foreign_keys=ON; diff --git a/pkg/sqlite/movies_test.go b/pkg/sqlite/movies_test.go index ed0ef724291..9b9615fbd90 100644 --- a/pkg/sqlite/movies_test.go +++ b/pkg/sqlite/movies_test.go @@ -291,7 +291,7 @@ func TestMovieUpdateFrontImage(t *testing.T) { // create movie to test against const name = "TestMovieUpdateMovieImages" movie := models.Movie{ - Name: name, + Name: name, } err := qb.Create(ctx, &movie) if err != nil { @@ -311,7 +311,7 @@ func TestMovieUpdateBackImage(t *testing.T) { // create movie to test against const name = "TestMovieUpdateMovieImages" movie := models.Movie{ - Name: name, + Name: name, } err := qb.Create(ctx, &movie) if err != nil { diff --git a/pkg/sqlite/repository.go b/pkg/sqlite/repository.go index 2292e868a62..c65965fe787 100644 --- a/pkg/sqlite/repository.go +++ b/pkg/sqlite/repository.go @@ -9,7 +9,6 @@ import ( "github.com/jmoiron/sqlx" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" ) @@ -336,7 +335,7 @@ type captionRepository struct { repository } -func (r *captionRepository) get(ctx context.Context, id file.ID) ([]*models.VideoCaption, error) { +func (r *captionRepository) get(ctx context.Context, id models.FileID) ([]*models.VideoCaption, error) { query := fmt.Sprintf("SELECT %s, %s, %s from %s WHERE %s = ?", captionCodeColumn, captionFilenameColumn, captionTypeColumn, r.tableName, r.idColumn) var ret []*models.VideoCaption err := r.queryFunc(ctx, query, []interface{}{id}, false, func(rows *sqlx.Rows) error { @@ 
-359,12 +358,12 @@ func (r *captionRepository) get(ctx context.Context, id file.ID) ([]*models.Vide return ret, err } -func (r *captionRepository) insert(ctx context.Context, id file.ID, caption *models.VideoCaption) (sql.Result, error) { +func (r *captionRepository) insert(ctx context.Context, id models.FileID, caption *models.VideoCaption) (sql.Result, error) { stmt := fmt.Sprintf("INSERT INTO %s (%s, %s, %s, %s) VALUES (?, ?, ?, ?)", r.tableName, r.idColumn, captionCodeColumn, captionFilenameColumn, captionTypeColumn) return r.tx.Exec(ctx, stmt, id, caption.LanguageCode, caption.Filename, caption.CaptionType) } -func (r *captionRepository) replace(ctx context.Context, id file.ID, captions []*models.VideoCaption) error { +func (r *captionRepository) replace(ctx context.Context, id models.FileID, captions []*models.VideoCaption) error { if err := r.destroy(ctx, []int{int(id)}); err != nil { return err } @@ -443,12 +442,12 @@ type filesRepository struct { } type relatedFileRow struct { - ID int `db:"id"` - FileID file.ID `db:"file_id"` - Primary bool `db:"primary"` + ID int `db:"id"` + FileID models.FileID `db:"file_id"` + Primary bool `db:"primary"` } -func (r *filesRepository) getMany(ctx context.Context, ids []int, primaryOnly bool) ([][]file.ID, error) { +func (r *filesRepository) getMany(ctx context.Context, ids []int, primaryOnly bool) ([][]models.FileID, error) { var primaryClause string if primaryOnly { primaryClause = " AND `primary` = 1" @@ -476,7 +475,7 @@ func (r *filesRepository) getMany(ctx context.Context, ids []int, primaryOnly bo return nil, err } - ret := make([][]file.ID, len(ids)) + ret := make([][]models.FileID, len(ids)) idToIndex := make(map[int]int) for i, id := range ids { idToIndex[id] = i @@ -488,7 +487,7 @@ func (r *filesRepository) getMany(ctx context.Context, ids []int, primaryOnly bo if row.Primary { // prepend to list - ret[idToIndex[id]] = append([]file.ID{fileID}, ret[idToIndex[id]]...) 
+ ret[idToIndex[id]] = append([]models.FileID{fileID}, ret[idToIndex[id]]...) } else { ret[idToIndex[id]] = append(ret[idToIndex[id]], row.FileID) } @@ -497,15 +496,15 @@ func (r *filesRepository) getMany(ctx context.Context, ids []int, primaryOnly bo return ret, nil } -func (r *filesRepository) get(ctx context.Context, id int) ([]file.ID, error) { +func (r *filesRepository) get(ctx context.Context, id int) ([]models.FileID, error) { query := fmt.Sprintf("SELECT file_id, `primary` from %s WHERE %s = ?", r.tableName, r.idColumn) type relatedFile struct { - FileID file.ID `db:"file_id"` - Primary bool `db:"primary"` + FileID models.FileID `db:"file_id"` + Primary bool `db:"primary"` } - var ret []file.ID + var ret []models.FileID if err := r.queryFunc(ctx, query, []interface{}{id}, false, func(rows *sqlx.Rows) error { var f relatedFile @@ -515,7 +514,7 @@ func (r *filesRepository) get(ctx context.Context, id int) ([]file.ID, error) { if f.Primary { // prepend to list - ret = append([]file.ID{f.FileID}, ret...) + ret = append([]models.FileID{f.FileID}, ret...) 
} else { ret = append(ret, f.FileID) } diff --git a/pkg/sqlite/saved_filter.go b/pkg/sqlite/saved_filter.go index f4b55fe72ef..6b92b7657b4 100644 --- a/pkg/sqlite/saved_filter.go +++ b/pkg/sqlite/saved_filter.go @@ -3,6 +3,7 @@ package sqlite import ( "context" "database/sql" + "encoding/json" "errors" "fmt" @@ -10,6 +11,7 @@ import ( "github.com/doug-martin/goqu/v9/exp" "github.com/jmoiron/sqlx" + "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/sliceutil/intslice" ) @@ -20,25 +22,67 @@ const ( ) type savedFilterRow struct { - ID int `db:"id" goqu:"skipinsert"` - Mode string `db:"mode"` - Name string `db:"name"` - Filter string `db:"filter"` + ID int `db:"id" goqu:"skipinsert"` + Mode models.FilterMode `db:"mode"` + Name string `db:"name"` + FindFilter string `db:"find_filter"` + ObjectFilter string `db:"object_filter"` + UIOptions string `db:"ui_options"` +} + +func encodeJSONOrEmpty(v interface{}) string { + if v == nil { + return "" + } + + encoded, err := json.Marshal(v) + if err != nil { + logger.Errorf("error encoding json %v: %v", v, err) + } + + return string(encoded) +} + +func decodeJSON(s string, v interface{}) { + if s == "" { + return + } + + if err := json.Unmarshal([]byte(s), v); err != nil { + logger.Errorf("error decoding json %q: %v", s, err) + } } func (r *savedFilterRow) fromSavedFilter(o models.SavedFilter) { r.ID = o.ID - r.Mode = string(o.Mode) + r.Mode = o.Mode r.Name = o.Name - r.Filter = o.Filter + + // encode the filters as json + r.FindFilter = encodeJSONOrEmpty(o.FindFilter) + r.ObjectFilter = encodeJSONOrEmpty(o.ObjectFilter) + r.UIOptions = encodeJSONOrEmpty(o.UIOptions) } func (r *savedFilterRow) resolve() *models.SavedFilter { ret := &models.SavedFilter{ - ID: r.ID, - Name: r.Name, - Mode: models.FilterMode(r.Mode), - Filter: r.Filter, + ID: r.ID, + Mode: r.Mode, + Name: r.Name, + } + + // decode the filters from json + if r.FindFilter != "" { + ret.FindFilter = 
&models.FindFilterType{} + decodeJSON(r.FindFilter, &ret.FindFilter) + } + if r.ObjectFilter != "" { + ret.ObjectFilter = make(map[string]interface{}) + decodeJSON(r.ObjectFilter, &ret.ObjectFilter) + } + if r.UIOptions != "" { + ret.UIOptions = make(map[string]interface{}) + decodeJSON(r.UIOptions, &ret.UIOptions) } return ret @@ -46,7 +90,6 @@ func (r *savedFilterRow) resolve() *models.SavedFilter { type SavedFilterStore struct { repository - tableMgr *table } @@ -77,7 +120,7 @@ func (qb *SavedFilterStore) Create(ctx context.Context, newObject *models.SavedF return err } - updated, err := qb.find(ctx, id) + updated, err := qb.Find(ctx, id) if err != nil { return fmt.Errorf("finding after create: %w", err) } @@ -166,7 +209,6 @@ func (qb *SavedFilterStore) find(ctx context.Context, id int) (*models.SavedFilt return ret, nil } -// returns nil, sql.ErrNoRows if not found func (qb *SavedFilterStore) get(ctx context.Context, q *goqu.SelectDataset) (*models.SavedFilter, error) { ret, err := qb.getMany(ctx, q) if err != nil { diff --git a/pkg/sqlite/saved_filter_test.go b/pkg/sqlite/saved_filter_test.go index 0a6e32a1ca4..aa98121fd45 100644 --- a/pkg/sqlite/saved_filter_test.go +++ b/pkg/sqlite/saved_filter_test.go @@ -42,15 +42,35 @@ func TestSavedFilterFindByMode(t *testing.T) { func TestSavedFilterDestroy(t *testing.T) { const filterName = "filterToDestroy" - const testFilter = "{}" + filterQ := "" + filterPage := 1 + filterPerPage := 40 + filterSort := "date" + filterDirection := models.SortDirectionEnumAsc + findFilter := models.FindFilterType{ + Q: &filterQ, + Page: &filterPage, + PerPage: &filterPerPage, + Sort: &filterSort, + Direction: &filterDirection, + } + objectFilter := map[string]interface{}{ + "test": "foo", + } + uiOptions := map[string]interface{}{ + "display_mode": 1, + "zoom_index": 1, + } var id int // create the saved filter to destroy withTxn(func(ctx context.Context) error { newFilter := models.SavedFilter{ - Name: filterName, - Mode: 
models.FilterModeScenes, - Filter: testFilter, + Name: filterName, + Mode: models.FilterModeScenes, + FindFilter: &findFilter, + ObjectFilter: objectFilter, + UIOptions: uiOptions, } err := db.SavedFilter.Create(ctx, &newFilter) @@ -88,12 +108,32 @@ func TestSavedFilterFindDefault(t *testing.T) { } func TestSavedFilterSetDefault(t *testing.T) { - const newFilter = "foo" + filterQ := "" + filterPage := 1 + filterPerPage := 40 + filterSort := "date" + filterDirection := models.SortDirectionEnumAsc + findFilter := models.FindFilterType{ + Q: &filterQ, + Page: &filterPage, + PerPage: &filterPerPage, + Sort: &filterSort, + Direction: &filterDirection, + } + objectFilter := map[string]interface{}{ + "test": "foo", + } + uiOptions := map[string]interface{}{ + "display_mode": 1, + "zoom_index": 1, + } withTxn(func(ctx context.Context) error { err := db.SavedFilter.SetDefault(ctx, &models.SavedFilter{ - Mode: models.FilterModeMovies, - Filter: newFilter, + Mode: models.FilterModeMovies, + FindFilter: &findFilter, + ObjectFilter: objectFilter, + UIOptions: uiOptions, }) return err @@ -104,7 +144,7 @@ func TestSavedFilterSetDefault(t *testing.T) { def, err := db.SavedFilter.FindDefault(ctx, models.FilterModeMovies) if err == nil { defID = def.ID - assert.Equal(t, newFilter, def.Filter) + assert.Equal(t, &findFilter, def.FindFilter) } return err diff --git a/pkg/sqlite/scene.go b/pkg/sqlite/scene.go index 8fc37937b8f..215c1740953 100644 --- a/pkg/sqlite/scene.go +++ b/pkg/sqlite/scene.go @@ -17,7 +17,6 @@ import ( "gopkg.in/guregu/null.v4" "gopkg.in/guregu/null.v4/zero" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/sliceutil/intslice" "github.com/stashapp/stash/pkg/utils" @@ -232,13 +231,13 @@ func (qb *SceneStore) selectDataset() *goqu.SelectDataset { checksum, goqu.On( checksum.Col(fileIDColumn).Eq(scenesFilesJoinTable.Col(fileIDColumn)), - checksum.Col("type").Eq(file.FingerprintTypeMD5), + 
checksum.Col("type").Eq(models.FingerprintTypeMD5), ), ).LeftJoin( oshash, goqu.On( oshash.Col(fileIDColumn).Eq(scenesFilesJoinTable.Col(fileIDColumn)), - oshash.Col("type").Eq(file.FingerprintTypeOshash), + oshash.Col("type").Eq(models.FingerprintTypeOshash), ), ).Select( qb.table().All(), @@ -250,7 +249,7 @@ func (qb *SceneStore) selectDataset() *goqu.SelectDataset { ) } -func (qb *SceneStore) Create(ctx context.Context, newObject *models.Scene, fileIDs []file.ID) error { +func (qb *SceneStore) Create(ctx context.Context, newObject *models.Scene, fileIDs []models.FileID) error { var r sceneRow r.fromScene(*newObject) @@ -411,7 +410,7 @@ func (qb *SceneStore) Update(ctx context.Context, updatedObject *models.Scene) e } if updatedObject.Files.Loaded() { - fileIDs := make([]file.ID, len(updatedObject.Files.List())) + fileIDs := make([]models.FileID, len(updatedObject.Files.List())) for i, f := range updatedObject.Files.List() { fileIDs[i] = f.ID } @@ -538,7 +537,7 @@ func (qb *SceneStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]*mo return ret, nil } -func (qb *SceneStore) GetFiles(ctx context.Context, id int) ([]*file.VideoFile, error) { +func (qb *SceneStore) GetFiles(ctx context.Context, id int) ([]*models.VideoFile, error) { fileIDs, err := qb.filesRepository().get(ctx, id) if err != nil { return nil, err @@ -550,10 +549,10 @@ func (qb *SceneStore) GetFiles(ctx context.Context, id int) ([]*file.VideoFile, return nil, err } - ret := make([]*file.VideoFile, len(files)) + ret := make([]*models.VideoFile, len(files)) for i, f := range files { var ok bool - ret[i], ok = f.(*file.VideoFile) + ret[i], ok = f.(*models.VideoFile) if !ok { return nil, fmt.Errorf("expected file to be *file.VideoFile not %T", f) } @@ -562,12 +561,12 @@ func (qb *SceneStore) GetFiles(ctx context.Context, id int) ([]*file.VideoFile, return ret, nil } -func (qb *SceneStore) GetManyFileIDs(ctx context.Context, ids []int) ([][]file.ID, error) { +func (qb *SceneStore) 
GetManyFileIDs(ctx context.Context, ids []int) ([][]models.FileID, error) { const primaryOnly = false return qb.filesRepository().getMany(ctx, ids, primaryOnly) } -func (qb *SceneStore) FindByFileID(ctx context.Context, fileID file.ID) ([]*models.Scene, error) { +func (qb *SceneStore) FindByFileID(ctx context.Context, fileID models.FileID) ([]*models.Scene, error) { sq := dialect.From(scenesFilesJoinTable).Select(scenesFilesJoinTable.Col(sceneIDColumn)).Where( scenesFilesJoinTable.Col(fileIDColumn).Eq(fileID), ) @@ -580,7 +579,7 @@ func (qb *SceneStore) FindByFileID(ctx context.Context, fileID file.ID) ([]*mode return ret, nil } -func (qb *SceneStore) FindByPrimaryFileID(ctx context.Context, fileID file.ID) ([]*models.Scene, error) { +func (qb *SceneStore) FindByPrimaryFileID(ctx context.Context, fileID models.FileID) ([]*models.Scene, error) { sq := dialect.From(scenesFilesJoinTable).Select(scenesFilesJoinTable.Col(sceneIDColumn)).Where( scenesFilesJoinTable.Col(fileIDColumn).Eq(fileID), scenesFilesJoinTable.Col("primary").Eq(1), @@ -594,14 +593,14 @@ func (qb *SceneStore) FindByPrimaryFileID(ctx context.Context, fileID file.ID) ( return ret, nil } -func (qb *SceneStore) CountByFileID(ctx context.Context, fileID file.ID) (int, error) { +func (qb *SceneStore) CountByFileID(ctx context.Context, fileID models.FileID) (int, error) { joinTable := scenesFilesJoinTable q := dialect.Select(goqu.COUNT("*")).From(joinTable).Where(joinTable.Col(fileIDColumn).Eq(fileID)) return count(ctx, q) } -func (qb *SceneStore) FindByFingerprints(ctx context.Context, fp []file.Fingerprint) ([]*models.Scene, error) { +func (qb *SceneStore) FindByFingerprints(ctx context.Context, fp []models.Fingerprint) ([]*models.Scene, error) { fingerprintTable := fingerprintTableMgr.table var ex []exp.Expression @@ -629,18 +628,18 @@ func (qb *SceneStore) FindByFingerprints(ctx context.Context, fp []file.Fingerpr } func (qb *SceneStore) FindByChecksum(ctx context.Context, checksum string) 
([]*models.Scene, error) { - return qb.FindByFingerprints(ctx, []file.Fingerprint{ + return qb.FindByFingerprints(ctx, []models.Fingerprint{ { - Type: file.FingerprintTypeMD5, + Type: models.FingerprintTypeMD5, Fingerprint: checksum, }, }) } func (qb *SceneStore) FindByOSHash(ctx context.Context, oshash string) ([]*models.Scene, error) { - return qb.FindByFingerprints(ctx, []file.Fingerprint{ + return qb.FindByFingerprints(ctx, []models.Fingerprint{ { - Type: file.FingerprintTypeOshash, + Type: models.FingerprintTypeOshash, Fingerprint: oshash, }, }) @@ -1684,7 +1683,7 @@ func (qb *SceneStore) destroyCover(ctx context.Context, sceneID int) error { return qb.DestroyImage(ctx, sceneID, sceneCoverBlobColumn) } -func (qb *SceneStore) AssignFiles(ctx context.Context, sceneID int, fileIDs []file.ID) error { +func (qb *SceneStore) AssignFiles(ctx context.Context, sceneID int, fileIDs []models.FileID) error { // assuming a file can only be assigned to a single scene if err := scenesFilesTableMgr.destroyJoins(ctx, fileIDs); err != nil { return err @@ -1736,9 +1735,9 @@ func (qb *SceneStore) filesRepository() *filesRepository { } } -func (qb *SceneStore) AddFileID(ctx context.Context, id int, fileID file.ID) error { +func (qb *SceneStore) AddFileID(ctx context.Context, id int, fileID models.FileID) error { const firstPrimary = false - return scenesFilesTableMgr.insertJoins(ctx, id, firstPrimary, []file.ID{fileID}) + return scenesFilesTableMgr.insertJoins(ctx, id, firstPrimary, []models.FileID{fileID}) } func (qb *SceneStore) performersRepository() *joinRepository { diff --git a/pkg/sqlite/scene_test.go b/pkg/sqlite/scene_test.go index 8ab34a112f6..0da236f4d7d 100644 --- a/pkg/sqlite/scene_test.go +++ b/pkg/sqlite/scene_test.go @@ -14,7 +14,6 @@ import ( "testing" "time" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/sliceutil/intslice" "github.com/stretchr/testify/assert" @@ -165,8 +164,8 @@ func 
Test_sceneQueryBuilder_Create(t *testing.T) { Organized: true, OCounter: ocounter, StudioID: &studioIDs[studioIdxWithScene], - Files: models.NewRelatedVideoFiles([]*file.VideoFile{ - videoFile.(*file.VideoFile), + Files: models.NewRelatedVideoFiles([]*models.VideoFile{ + videoFile.(*models.VideoFile), }), CreatedAt: createdAt, UpdatedAt: updatedAt, @@ -248,7 +247,7 @@ func Test_sceneQueryBuilder_Create(t *testing.T) { runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { assert := assert.New(t) - var fileIDs []file.ID + var fileIDs []models.FileID if tt.newObject.Files.Loaded() { for _, f := range tt.newObject.Files.List() { fileIDs = append(fileIDs, f.ID) @@ -308,7 +307,7 @@ func clearSceneFileIDs(scene *models.Scene) { } } -func makeSceneFileWithID(i int) *file.VideoFile { +func makeSceneFileWithID(i int) *models.VideoFile { ret := makeSceneFile(i) ret.ID = sceneFileIDs[i] return ret @@ -626,7 +625,7 @@ func Test_sceneQueryBuilder_UpdatePartial(t *testing.T) { }, models.Scene{ ID: sceneIDs[sceneIdxWithSpacedName], - Files: models.NewRelatedVideoFiles([]*file.VideoFile{ + Files: models.NewRelatedVideoFiles([]*models.VideoFile{ makeSceneFile(sceneIdxWithSpacedName), }), Title: title, @@ -678,7 +677,7 @@ func Test_sceneQueryBuilder_UpdatePartial(t *testing.T) { models.Scene{ ID: sceneIDs[sceneIdxWithSpacedName], OCounter: getOCounter(sceneIdxWithSpacedName), - Files: models.NewRelatedVideoFiles([]*file.VideoFile{ + Files: models.NewRelatedVideoFiles([]*models.VideoFile{ makeSceneFile(sceneIdxWithSpacedName), }), GalleryIDs: models.NewRelatedIDs([]int{}), @@ -1460,7 +1459,7 @@ func makeSceneWithID(index int) *models.Scene { ret := makeScene(index) ret.ID = sceneIDs[index] - ret.Files = models.NewRelatedVideoFiles([]*file.VideoFile{makeSceneFile(index)}) + ret.Files = models.NewRelatedVideoFiles([]*models.VideoFile{makeSceneFile(index)}) return ret } @@ -1891,7 +1890,7 @@ func scenesToIDs(i []*models.Scene) []int { func 
Test_sceneStore_FindByFileID(t *testing.T) { tests := []struct { name string - fileID file.ID + fileID models.FileID include []int exclude []int }{ @@ -1940,7 +1939,7 @@ func Test_sceneStore_FindByFileID(t *testing.T) { func Test_sceneStore_CountByFileID(t *testing.T) { tests := []struct { name string - fileID file.ID + fileID models.FileID want int }{ { @@ -3053,8 +3052,8 @@ func queryScenes(ctx context.Context, t *testing.T, queryBuilder models.SceneRea func createScene(ctx context.Context, width int, height int) (*models.Scene, error) { name := fmt.Sprintf("TestSceneQueryResolutionModifiers %d %d", width, height) - sceneFile := &file.VideoFile{ - BaseFile: &file.BaseFile{ + sceneFile := &models.VideoFile{ + BaseFile: &models.BaseFile{ Basename: name, ParentFolderID: folderIDs[folderIdxWithSceneFiles], }, @@ -3068,7 +3067,7 @@ func createScene(ctx context.Context, width int, height int) (*models.Scene, err scene := &models.Scene{} - if err := db.Scene.Create(ctx, scene, []file.ID{sceneFile.ID}); err != nil { + if err := db.Scene.Create(ctx, scene, []models.FileID{sceneFile.ID}); err != nil { return nil, err } @@ -4559,7 +4558,7 @@ func TestSceneStore_AssignFiles(t *testing.T) { tests := []struct { name string sceneID int - fileID file.ID + fileID models.FileID wantErr bool }{ { @@ -4587,7 +4586,7 @@ func TestSceneStore_AssignFiles(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { withRollbackTxn(func(ctx context.Context) error { - if err := qb.AssignFiles(ctx, tt.sceneID, []file.ID{tt.fileID}); (err != nil) != tt.wantErr { + if err := qb.AssignFiles(ctx, tt.sceneID, []models.FileID{tt.fileID}); (err != nil) != tt.wantErr { t.Errorf("SceneStore.AssignFiles() error = %v, wantErr %v", err, tt.wantErr) } diff --git a/pkg/sqlite/setup_test.go b/pkg/sqlite/setup_test.go index c57f272c7d4..e182ef99b5b 100644 --- a/pkg/sqlite/setup_test.go +++ b/pkg/sqlite/setup_test.go @@ -14,7 +14,6 @@ import ( "testing" "time" - 
"github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/sliceutil/intslice" "github.com/stashapp/stash/pkg/sqlite" @@ -283,11 +282,11 @@ const ( ) var ( - folderIDs []file.FolderID - fileIDs []file.ID - sceneFileIDs []file.ID - imageFileIDs []file.ID - galleryFileIDs []file.ID + folderIDs []models.FolderID + fileIDs []models.FileID + sceneFileIDs []models.FileID + imageFileIDs []models.FileID + galleryFileIDs []models.FileID chapterIDs []int sceneIDs []int @@ -700,8 +699,8 @@ func getFolderModTime(index int) time.Time { return time.Date(2000, 1, (index%10)+1, 0, 0, 0, 0, time.UTC) } -func makeFolder(i int) file.Folder { - var folderID *file.FolderID +func makeFolder(i int) models.Folder { + var folderID *models.FolderID var folderIdx *int if pidx, ok := folderParentFolders[i]; ok { folderIdx = &pidx @@ -709,9 +708,9 @@ func makeFolder(i int) file.Folder { folderID = &v } - return file.Folder{ + return models.Folder{ ParentFolderID: folderID, - DirEntry: file.DirEntry{ + DirEntry: models.DirEntry{ // zip files have to be added after creating files ModTime: getFolderModTime(i), }, @@ -748,8 +747,8 @@ func getFileModTime(index int) time.Time { return getFolderModTime(index) } -func getFileFingerprints(index int) []file.Fingerprint { - return []file.Fingerprint{ +func getFileFingerprints(index int) []models.Fingerprint { + return []models.Fingerprint{ { Type: "MD5", Fingerprint: getPrefixedStringValue("file", index, "md5"), @@ -772,22 +771,22 @@ func getFileDuration(index int) float64 { return float64(duration) + 0.432 } -func makeFile(i int) file.File { +func makeFile(i int) models.File { folderID := folderIDs[fileFolders[i]] if folderID == 0 { folderID = folderIDs[folderIdxWithFiles] } - var zipFileID *file.ID + var zipFileID *models.FileID if zipFileIndex, found := fileZipFiles[i]; found { zipFileID = &fileIDs[zipFileIndex] } - var ret file.File - baseFile := &file.BaseFile{ + var ret models.File + baseFile := 
&models.BaseFile{ Basename: getFileBaseName(i), ParentFolderID: folderID, - DirEntry: file.DirEntry{ + DirEntry: models.DirEntry{ // zip files have to be added after creating files ModTime: getFileModTime(i), ZipFileID: zipFileID, @@ -799,7 +798,7 @@ func makeFile(i int) file.File { ret = baseFile if i >= fileIdxStartVideoFiles && i < fileIdxStartImageFiles { - ret = &file.VideoFile{ + ret = &models.VideoFile{ BaseFile: baseFile, Format: getFileStringValue(i, "format"), Width: getWidth(i), @@ -811,7 +810,7 @@ func makeFile(i int) file.File { BitRate: int64(getFileDuration(i)) * 3, } } else if i >= fileIdxStartImageFiles && i < fileIdxStartGalleryFiles { - ret = &file.ImageFile{ + ret = &models.ImageFile{ BaseFile: baseFile, Format: getFileStringValue(i, "format"), Width: getWidth(i), @@ -977,27 +976,27 @@ func getSceneBasename(index int) string { return getSceneStringValue(index, pathField) } -func makeSceneFile(i int) *file.VideoFile { - fp := []file.Fingerprint{ +func makeSceneFile(i int) *models.VideoFile { + fp := []models.Fingerprint{ { - Type: file.FingerprintTypeMD5, + Type: models.FingerprintTypeMD5, Fingerprint: getSceneStringValue(i, checksumField), }, { - Type: file.FingerprintTypeOshash, + Type: models.FingerprintTypeOshash, Fingerprint: getSceneStringValue(i, "oshash"), }, } if i != sceneIdxMissingPhash { - fp = append(fp, file.Fingerprint{ - Type: file.FingerprintTypePhash, + fp = append(fp, models.Fingerprint{ + Type: models.FingerprintTypePhash, Fingerprint: getScenePhash(i, "phash"), }) } - return &file.VideoFile{ - BaseFile: &file.BaseFile{ + return &models.VideoFile{ + BaseFile: &models.BaseFile{ Path: getFilePath(folderIdxWithSceneFiles, getSceneBasename(i)), Basename: getSceneBasename(i), ParentFolderID: folderIDs[folderIdxWithSceneFiles], @@ -1100,7 +1099,7 @@ func createScenes(ctx context.Context, n int) error { scene := makeScene(i) - if err := sqb.Create(ctx, scene, []file.ID{f.ID}); err != nil { + if err := sqb.Create(ctx, scene, 
[]models.FileID{f.ID}); err != nil { return fmt.Errorf("Error creating scene %v+: %s", scene, err.Error()) } @@ -1114,19 +1113,32 @@ func getImageStringValue(index int, field string) string { return fmt.Sprintf("image_%04d_%s", index, field) } +func getImageNullStringPtr(index int, field string) *string { + return getStringPtrFromNullString(getPrefixedNullStringValue("image", index, field)) +} + +func getImageEmptyString(index int, field string) string { + v := getImageNullStringPtr(index, field) + if v == nil { + return "" + } + + return *v +} + func getImageBasename(index int) string { return getImageStringValue(index, pathField) } -func makeImageFile(i int) *file.ImageFile { - return &file.ImageFile{ - BaseFile: &file.BaseFile{ +func makeImageFile(i int) *models.ImageFile { + return &models.ImageFile{ + BaseFile: &models.BaseFile{ Path: getFilePath(folderIdxWithImageFiles, getImageBasename(i)), Basename: getImageBasename(i), ParentFolderID: folderIDs[folderIdxWithImageFiles], - Fingerprints: []file.Fingerprint{ + Fingerprints: []models.Fingerprint{ { - Type: file.FingerprintTypeMD5, + Type: models.FingerprintTypeMD5, Fingerprint: getImageStringValue(i, checksumField), }, }, @@ -1149,10 +1161,12 @@ func makeImage(i int) *models.Image { tids := indexesToIDs(tagIDs, imageTags[i]) return &models.Image{ - Title: title, - Rating: getIntPtr(getRating(i)), - Date: getObjectDate(i), - URL: getImageStringValue(i, urlField), + Title: title, + Rating: getIntPtr(getRating(i)), + Date: getObjectDate(i), + URLs: models.NewRelatedStrings([]string{ + getImageEmptyString(i, urlField), + }), OCounter: getOCounter(i), StudioID: studioID, GalleryIDs: models.NewRelatedIDs(gids), @@ -1178,10 +1192,7 @@ func createImages(ctx context.Context, n int) error { image := makeImage(i) - err := qb.Create(ctx, &models.ImageCreateInput{ - Image: image, - FileIDs: []file.ID{f.ID}, - }) + err := qb.Create(ctx, image, []models.FileID{f.ID}) if err != nil { return fmt.Errorf("Error creating image 
%v+: %s", image, err.Error()) @@ -1209,14 +1220,14 @@ func getGalleryBasename(index int) string { return getGalleryStringValue(index, pathField) } -func makeGalleryFile(i int) *file.BaseFile { - return &file.BaseFile{ +func makeGalleryFile(i int) *models.BaseFile { + return &models.BaseFile{ Path: getFilePath(folderIdxWithGalleryFiles, getGalleryBasename(i)), Basename: getGalleryBasename(i), ParentFolderID: folderIDs[folderIdxWithGalleryFiles], - Fingerprints: []file.Fingerprint{ + Fingerprints: []models.Fingerprint{ { - Type: file.FingerprintTypeMD5, + Type: models.FingerprintTypeMD5, Fingerprint: getGalleryStringValue(i, checksumField), }, }, @@ -1255,14 +1266,14 @@ func createGalleries(ctx context.Context, n int) error { fqb := db.File for i := 0; i < n; i++ { - var fileIDs []file.ID + var fileIDs []models.FileID if i != galleryIdxWithoutFile { f := makeGalleryFile(i) if err := fqb.Create(ctx, f); err != nil { return fmt.Errorf("creating gallery file: %w", err) } galleryFileIDs = append(galleryFileIDs, f.ID) - fileIDs = []file.ID{f.ID} + fileIDs = []models.FileID{f.ID} } else { galleryFileIDs = append(galleryFileIDs, 0) } @@ -1714,10 +1725,29 @@ func getSavedFilterName(index int) string { func createSavedFilters(ctx context.Context, qb models.SavedFilterReaderWriter, n int) error { for i := 0; i < n; i++ { + filterQ := "" + filterPage := i + filterPerPage := i * 40 + filterSort := "date" + filterDirection := models.SortDirectionEnumAsc + findFilter := models.FindFilterType{ + Q: &filterQ, + Page: &filterPage, + PerPage: &filterPerPage, + Sort: &filterSort, + Direction: &filterDirection, + } savedFilter := models.SavedFilter{ - Mode: getSavedFilterMode(i), - Name: getSavedFilterName(i), - Filter: getPrefixedStringValue("savedFilter", i, "Filter"), + Mode: getSavedFilterMode(i), + Name: getSavedFilterName(i), + FindFilter: &findFilter, + ObjectFilter: map[string]interface{}{ + "test": "object", + }, + UIOptions: map[string]interface{}{ + "display_mode": 1, + 
"zoom_index": 1, + }, } err := qb.Create(ctx, &savedFilter) diff --git a/pkg/sqlite/table.go b/pkg/sqlite/table.go index e3cedce37d1..510b5877cf8 100644 --- a/pkg/sqlite/table.go +++ b/pkg/sqlite/table.go @@ -11,7 +11,6 @@ import ( "github.com/jmoiron/sqlx" "gopkg.in/guregu/null.v4" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/sliceutil" @@ -707,12 +706,12 @@ type relatedFilesTable struct { } // type scenesFilesRow struct { -// SceneID int `db:"scene_id"` -// Primary bool `db:"primary"` -// FileID file.ID `db:"file_id"` +// SceneID int `db:"scene_id"` +// Primary bool `db:"primary"` +// FileID models.FileID `db:"file_id"` // } -func (t *relatedFilesTable) insertJoin(ctx context.Context, id int, primary bool, fileID file.ID) error { +func (t *relatedFilesTable) insertJoin(ctx context.Context, id int, primary bool, fileID models.FileID) error { q := dialect.Insert(t.table.table).Cols(t.idColumn.GetCol(), "primary", "file_id").Vals( goqu.Vals{id, primary, fileID}, ) @@ -724,7 +723,7 @@ func (t *relatedFilesTable) insertJoin(ctx context.Context, id int, primary bool return nil } -func (t *relatedFilesTable) insertJoins(ctx context.Context, id int, firstPrimary bool, fileIDs []file.ID) error { +func (t *relatedFilesTable) insertJoins(ctx context.Context, id int, firstPrimary bool, fileIDs []models.FileID) error { for i, fk := range fileIDs { if err := t.insertJoin(ctx, id, firstPrimary && i == 0, fk); err != nil { return err @@ -734,7 +733,7 @@ func (t *relatedFilesTable) insertJoins(ctx context.Context, id int, firstPrimar return nil } -func (t *relatedFilesTable) replaceJoins(ctx context.Context, id int, fileIDs []file.ID) error { +func (t *relatedFilesTable) replaceJoins(ctx context.Context, id int, fileIDs []models.FileID) error { if err := t.destroy(ctx, []int{id}); err != nil { return err } @@ -744,7 +743,7 @@ func (t *relatedFilesTable) replaceJoins(ctx 
context.Context, id int, fileIDs [] } // destroyJoins destroys all entries in the table with the provided fileIDs -func (t *relatedFilesTable) destroyJoins(ctx context.Context, fileIDs []file.ID) error { +func (t *relatedFilesTable) destroyJoins(ctx context.Context, fileIDs []models.FileID) error { q := dialect.Delete(t.table.table).Where(t.table.table.Col("file_id").In(fileIDs)) if _, err := exec(ctx, q); err != nil { @@ -754,7 +753,7 @@ func (t *relatedFilesTable) destroyJoins(ctx context.Context, fileIDs []file.ID) return nil } -func (t *relatedFilesTable) setPrimary(ctx context.Context, id int, fileID file.ID) error { +func (t *relatedFilesTable) setPrimary(ctx context.Context, id int, fileID models.FileID) error { table := t.table.table q := dialect.Update(table).Prepared(true).Set(goqu.Record{ diff --git a/pkg/sqlite/tables.go b/pkg/sqlite/tables.go index 69dc1d6a89f..dc1eb505115 100644 --- a/pkg/sqlite/tables.go +++ b/pkg/sqlite/tables.go @@ -13,6 +13,7 @@ var ( imagesTagsJoinTable = goqu.T(imagesTagsTable) performersImagesJoinTable = goqu.T(performersImagesTable) imagesFilesJoinTable = goqu.T(imagesFilesTable) + imagesURLsJoinTable = goqu.T(imagesURLsTable) galleriesFilesJoinTable = goqu.T(galleriesFilesTable) galleriesTagsJoinTable = goqu.T(galleriesTagsTable) @@ -70,6 +71,14 @@ var ( }, fkColumn: performersImagesJoinTable.Col(performerIDColumn), } + + imagesURLsTableMgr = &orderedValueTable[string]{ + table: table{ + table: imagesURLsJoinTable, + idColumn: imagesURLsJoinTable.Col(imageIDColumn), + }, + valueColumn: imagesURLsJoinTable.Col(imageURLColumn), + } ) var ( diff --git a/pkg/sqlite/tag.go b/pkg/sqlite/tag.go index ce09da4464f..ace5f8346da 100644 --- a/pkg/sqlite/tag.go +++ b/pkg/sqlite/tag.go @@ -396,6 +396,20 @@ func (qb *TagStore) FindByChildTagID(ctx context.Context, parentID int) ([]*mode return qb.queryTags(ctx, query, args) } +func (qb *TagStore) CountByParentTagID(ctx context.Context, parentID int) (int, error) { + q := 
dialect.Select(goqu.COUNT("*")).From(goqu.T("tags")). + InnerJoin(goqu.T("tags_relations"), goqu.On(goqu.I("tags_relations.parent_id").Eq(goqu.I("tags.id")))). + Where(goqu.I("tags_relations.child_id").Eq(goqu.V(parentID))) // Pass the parentID here + return count(ctx, q) +} + +func (qb *TagStore) CountByChildTagID(ctx context.Context, childID int) (int, error) { + q := dialect.Select(goqu.COUNT("*")).From(goqu.T("tags")). + InnerJoin(goqu.T("tags_relations"), goqu.On(goqu.I("tags_relations.child_id").Eq(goqu.I("tags.id")))). + Where(goqu.I("tags_relations.parent_id").Eq(goqu.V(childID))) // Pass the childID here + return count(ctx, q) +} + func (qb *TagStore) Count(ctx context.Context) (int, error) { q := dialect.Select(goqu.COUNT("*")).From(qb.table()) return count(ctx, q) @@ -890,9 +904,9 @@ func (qb *TagStore) queryTags(ctx context.Context, query string, args []interfac return ret, nil } -func (qb *TagStore) queryTagPaths(ctx context.Context, query string, args []interface{}) (models.TagPaths, error) { +func (qb *TagStore) queryTagPaths(ctx context.Context, query string, args []interface{}) ([]*models.TagPath, error) { const single = false - var ret models.TagPaths + var ret []*models.TagPath if err := qb.queryFunc(ctx, query, args, single, func(r *sqlx.Rows) error { var f tagPathRow if err := r.StructScan(&f); err != nil { diff --git a/pkg/sqlite/values.go b/pkg/sqlite/values.go index be812275f89..5e196051bd2 100644 --- a/pkg/sqlite/values.go +++ b/pkg/sqlite/values.go @@ -1,9 +1,9 @@ package sqlite import ( - "github.com/stashapp/stash/pkg/file" - "gopkg.in/guregu/null.v4" + + "github.com/stashapp/stash/pkg/models" ) // null package does not provide methods to convert null.Int to int pointer @@ -33,27 +33,27 @@ func nullFloatPtr(i null.Float) *float64 { return &v } -func nullIntFolderIDPtr(i null.Int) *file.FolderID { +func nullIntFolderIDPtr(i null.Int) *models.FolderID { if !i.Valid { return nil } - v := file.FolderID(i.Int64) + v := 
models.FolderID(i.Int64) return &v } -func nullIntFileIDPtr(i null.Int) *file.ID { +func nullIntFileIDPtr(i null.Int) *models.FileID { if !i.Valid { return nil } - v := file.ID(i.Int64) + v := models.FileID(i.Int64) return &v } -func nullIntFromFileIDPtr(i *file.ID) null.Int { +func nullIntFromFileIDPtr(i *models.FileID) null.Int { if i == nil { return null.NewInt(0, false) } @@ -61,7 +61,7 @@ func nullIntFromFileIDPtr(i *file.ID) null.Int { return null.IntFrom(int64(*i)) } -func nullIntFromFolderIDPtr(i *file.FolderID) null.Int { +func nullIntFromFolderIDPtr(i *models.FolderID) null.Int { if i == nil { return null.NewInt(0, false) } diff --git a/pkg/studio/export.go b/pkg/studio/export.go index 2ad158c17e1..9d6d79299b4 100644 --- a/pkg/studio/export.go +++ b/pkg/studio/export.go @@ -11,15 +11,15 @@ import ( "github.com/stashapp/stash/pkg/utils" ) -type FinderImageAliasStashIDGetter interface { - Finder - GetImage(ctx context.Context, studioID int) ([]byte, error) +type FinderImageStashIDGetter interface { + models.StudioGetter models.AliasLoader models.StashIDLoader + GetImage(ctx context.Context, studioID int) ([]byte, error) } // ToJSON converts a Studio object into its JSON equivalent. 
-func ToJSON(ctx context.Context, reader FinderImageAliasStashIDGetter, studio *models.Studio) (*jsonschema.Studio, error) { +func ToJSON(ctx context.Context, reader FinderImageStashIDGetter, studio *models.Studio) (*jsonschema.Studio, error) { newStudioJSON := jsonschema.Studio{ Name: studio.Name, URL: studio.URL, diff --git a/pkg/studio/import.go b/pkg/studio/import.go index 653dfce611f..1af5ec5c3e0 100644 --- a/pkg/studio/import.go +++ b/pkg/studio/import.go @@ -10,16 +10,15 @@ import ( "github.com/stashapp/stash/pkg/utils" ) -type NameFinderCreatorUpdater interface { - NameFinderCreator - Update(ctx context.Context, updatedStudio *models.Studio) error - UpdateImage(ctx context.Context, studioID int, image []byte) error +type ImporterReaderWriter interface { + models.StudioCreatorUpdater + FindByName(ctx context.Context, name string, nocase bool) (*models.Studio, error) } var ErrParentStudioNotExist = errors.New("parent studio does not exist") type Importer struct { - ReaderWriter NameFinderCreatorUpdater + ReaderWriter ImporterReaderWriter Input jsonschema.Studio MissingRefBehaviour models.ImportMissingRefEnum @@ -78,11 +77,10 @@ func (i *Importer) populateParentStudio(ctx context.Context) error { } func (i *Importer) createParentStudio(ctx context.Context, name string) (int, error) { - newStudio := &models.Studio{ - Name: name, - } + newStudio := models.NewStudio() + newStudio.Name = name - err := i.ReaderWriter.Create(ctx, newStudio) + err := i.ReaderWriter.Create(ctx, &newStudio) if err != nil { return 0, err } diff --git a/pkg/studio/query.go b/pkg/studio/query.go index ce3594eb17b..b20cec33109 100644 --- a/pkg/studio/query.go +++ b/pkg/studio/query.go @@ -6,21 +6,7 @@ import ( "github.com/stashapp/stash/pkg/models" ) -type Finder interface { - Find(ctx context.Context, id int) (*models.Studio, error) -} - -type Queryer interface { - Query(ctx context.Context, studioFilter *models.StudioFilterType, findFilter *models.FindFilterType) ([]*models.Studio, int, 
error) -} - -type FinderQueryer interface { - Finder - Queryer - models.AliasLoader -} - -func ByName(ctx context.Context, qb Queryer, name string) (*models.Studio, error) { +func ByName(ctx context.Context, qb models.StudioQueryer, name string) (*models.Studio, error) { f := &models.StudioFilterType{ Name: &models.StringCriterionInput{ Value: name, @@ -44,7 +30,7 @@ func ByName(ctx context.Context, qb Queryer, name string) (*models.Studio, error return nil, nil } -func ByAlias(ctx context.Context, qb Queryer, alias string) (*models.Studio, error) { +func ByAlias(ctx context.Context, qb models.StudioQueryer, alias string) (*models.Studio, error) { f := &models.StudioFilterType{ Aliases: &models.StringCriterionInput{ Value: alias, diff --git a/pkg/studio/update.go b/pkg/studio/update.go index 0b159edcd12..a1a16a0c491 100644 --- a/pkg/studio/update.go +++ b/pkg/studio/update.go @@ -12,11 +12,6 @@ var ( ErrStudioOwnAncestor = errors.New("studio cannot be an ancestor of itself") ) -type NameFinderCreator interface { - FindByName(ctx context.Context, name string, nocase bool) (*models.Studio, error) - Create(ctx context.Context, newStudio *models.Studio) error -} - type NameExistsError struct { Name string } @@ -36,7 +31,7 @@ func (e *NameUsedByAliasError) Error() string { // EnsureStudioNameUnique returns an error if the studio name provided // is used as a name or alias of another existing tag. 
-func EnsureStudioNameUnique(ctx context.Context, id int, name string, qb Queryer) error { +func EnsureStudioNameUnique(ctx context.Context, id int, name string, qb models.StudioQueryer) error { // ensure name is unique sameNameStudio, err := ByName(ctx, qb, name) if err != nil { @@ -65,7 +60,7 @@ func EnsureStudioNameUnique(ctx context.Context, id int, name string, qb Queryer return nil } -func EnsureAliasesUnique(ctx context.Context, id int, aliases []string, qb Queryer) error { +func EnsureAliasesUnique(ctx context.Context, id int, aliases []string, qb models.StudioQueryer) error { for _, a := range aliases { if err := EnsureStudioNameUnique(ctx, id, a, qb); err != nil { return err @@ -75,11 +70,17 @@ func EnsureAliasesUnique(ctx context.Context, id int, aliases []string, qb Query return nil } +type ValidateModifyReader interface { + models.StudioGetter + models.StudioQueryer + models.AliasLoader +} + // Checks to make sure that: // 1. The studio exists locally // 2. The studio is not its own ancestor // 3. 
The studio's aliases are unique -func ValidateModify(ctx context.Context, s models.StudioPartial, qb FinderQueryer) error { +func ValidateModify(ctx context.Context, s models.StudioPartial, qb ValidateModifyReader) error { existing, err := qb.Find(ctx, s.ID) if err != nil { return err @@ -110,7 +111,7 @@ func ValidateModify(ctx context.Context, s models.StudioPartial, qb FinderQuerye return nil } -func validateParent(ctx context.Context, studioID int, newParentID int, qb FinderQueryer) error { +func validateParent(ctx context.Context, studioID int, newParentID int, qb models.StudioGetter) error { if newParentID == studioID { return ErrStudioOwnAncestor } diff --git a/pkg/tag/import.go b/pkg/tag/import.go index 67bdbc460ca..6905d15ad73 100644 --- a/pkg/tag/import.go +++ b/pkg/tag/import.go @@ -9,13 +9,9 @@ import ( "github.com/stashapp/stash/pkg/utils" ) -type NameFinderCreatorUpdater interface { +type ImporterReaderWriter interface { + models.TagCreatorUpdater FindByName(ctx context.Context, name string, nocase bool) (*models.Tag, error) - Create(ctx context.Context, newTag *models.Tag) error - Update(ctx context.Context, updatedTag *models.Tag) error - UpdateImage(ctx context.Context, tagID int, image []byte) error - UpdateAliases(ctx context.Context, tagID int, aliases []string) error - UpdateParentTags(ctx context.Context, tagID int, parentIDs []int) error } type ParentTagNotExistError struct { @@ -31,7 +27,7 @@ func (e ParentTagNotExistError) MissingParent() string { } type Importer struct { - ReaderWriter NameFinderCreatorUpdater + ReaderWriter ImporterReaderWriter Input jsonschema.Tag MissingRefBehaviour models.ImportMissingRefEnum @@ -155,9 +151,10 @@ func (i *Importer) getParents(ctx context.Context) ([]int, error) { } func (i *Importer) createParent(ctx context.Context, name string) (int, error) { - newTag := models.NewTag(name) + newTag := models.NewTag() + newTag.Name = name - err := i.ReaderWriter.Create(ctx, newTag) + err := i.ReaderWriter.Create(ctx, 
&newTag) if err != nil { return 0, err } diff --git a/pkg/tag/query.go b/pkg/tag/query.go index a048054d763..76567434d1a 100644 --- a/pkg/tag/query.go +++ b/pkg/tag/query.go @@ -6,15 +6,7 @@ import ( "github.com/stashapp/stash/pkg/models" ) -type Finder interface { - Find(ctx context.Context, id int) (*models.Tag, error) -} - -type Queryer interface { - Query(ctx context.Context, tagFilter *models.TagFilterType, findFilter *models.FindFilterType) ([]*models.Tag, int, error) -} - -func ByName(ctx context.Context, qb Queryer, name string) (*models.Tag, error) { +func ByName(ctx context.Context, qb models.TagQueryer, name string) (*models.Tag, error) { f := &models.TagFilterType{ Name: &models.StringCriterionInput{ Value: name, @@ -38,7 +30,7 @@ func ByName(ctx context.Context, qb Queryer, name string) (*models.Tag, error) { return nil, nil } -func ByAlias(ctx context.Context, qb Queryer, alias string) (*models.Tag, error) { +func ByAlias(ctx context.Context, qb models.TagQueryer, alias string) (*models.Tag, error) { f := &models.TagFilterType{ Aliases: &models.StringCriterionInput{ Value: alias, diff --git a/pkg/tag/update.go b/pkg/tag/update.go index 3b0dbd4141e..dcb78bf9cab 100644 --- a/pkg/tag/update.go +++ b/pkg/tag/update.go @@ -7,11 +7,6 @@ import ( "github.com/stashapp/stash/pkg/models" ) -type NameFinderCreator interface { - FindByNames(ctx context.Context, names []string, nocase bool) ([]*models.Tag, error) - Create(ctx context.Context, newTag *models.Tag) error -} - type NameExistsError struct { Name string } @@ -43,7 +38,7 @@ func (e *InvalidTagHierarchyError) Error() string { // EnsureTagNameUnique returns an error if the tag name provided // is used as a name or alias of another existing tag. 
-func EnsureTagNameUnique(ctx context.Context, id int, name string, qb Queryer) error { +func EnsureTagNameUnique(ctx context.Context, id int, name string, qb models.TagQueryer) error { // ensure name is unique sameNameTag, err := ByName(ctx, qb, name) if err != nil { @@ -72,7 +67,7 @@ func EnsureTagNameUnique(ctx context.Context, id int, name string, qb Queryer) e return nil } -func EnsureAliasesUnique(ctx context.Context, id int, aliases []string, qb Queryer) error { +func EnsureAliasesUnique(ctx context.Context, id int, aliases []string, qb models.TagQueryer) error { for _, a := range aliases { if err := EnsureTagNameUnique(ctx, id, a, qb); err != nil { return err @@ -82,14 +77,14 @@ func EnsureAliasesUnique(ctx context.Context, id int, aliases []string, qb Query return nil } -type RelationshipGetter interface { +type RelationshipFinder interface { FindAllAncestors(ctx context.Context, tagID int, excludeIDs []int) ([]*models.TagPath, error) FindAllDescendants(ctx context.Context, tagID int, excludeIDs []int) ([]*models.TagPath, error) FindByChildTagID(ctx context.Context, childID int) ([]*models.Tag, error) FindByParentTagID(ctx context.Context, parentID int) ([]*models.Tag, error) } -func ValidateHierarchy(ctx context.Context, tag *models.Tag, parentIDs, childIDs []int, qb RelationshipGetter) error { +func ValidateHierarchy(ctx context.Context, tag *models.Tag, parentIDs, childIDs []int, qb RelationshipFinder) error { id := tag.ID allAncestors := make(map[int]*models.TagPath) allDescendants := make(map[int]*models.TagPath) @@ -177,7 +172,7 @@ func ValidateHierarchy(ctx context.Context, tag *models.Tag, parentIDs, childIDs return nil } -func MergeHierarchy(ctx context.Context, destination int, sources []int, qb RelationshipGetter) ([]int, []int, error) { +func MergeHierarchy(ctx context.Context, destination int, sources []int, qb RelationshipFinder) ([]int, []int, error) { var mergedParents, mergedChildren []int allIds := append([]int{destination}, sources...) 
diff --git a/scripts/cross-compile.sh b/scripts/cross-compile.sh index 07d187587bd..2b7e2786fb9 100755 --- a/scripts/cross-compile.sh +++ b/scripts/cross-compile.sh @@ -2,7 +2,7 @@ COMPILER_CONTAINER="stashapp/compiler:7" -BUILD_DATE=`go run -mod=vendor scripts/getDate.go` +BUILD_DATE=`go run scripts/getDate.go` GITHASH=`git rev-parse --short HEAD` STASH_VERSION=`git describe --tags --exclude latest_develop` diff --git a/ui/v2.5/.eslintrc.json b/ui/v2.5/.eslintrc.json index f37f8028ca6..55e4d902dd8 100644 --- a/ui/v2.5/.eslintrc.json +++ b/ui/v2.5/.eslintrc.json @@ -53,10 +53,6 @@ "import/namespace": "off", "import/no-unresolved": "off", "react/display-name": "off", - "react-hooks/exhaustive-deps": [ - "error", - { "additionalHooks": "^(useDebounce)$" } - ], "react/prop-types": "off", "react/style-prop-object": [ "error", @@ -74,7 +70,7 @@ "prefer-destructuring": ["error", { "object": true, "array": false }], "@typescript-eslint/no-use-before-define": [ "error", - { "functions": false, "classes": true } + { "functions": false, "classes": false } ], "no-nested-ternary": "off" } diff --git a/ui/v2.5/package.json b/ui/v2.5/package.json index 60b2d35f477..24039dfbad8 100644 --- a/ui/v2.5/package.json +++ b/ui/v2.5/package.json @@ -32,7 +32,6 @@ "@silvermine/videojs-airplay": "^1.2.0", "@silvermine/videojs-chromecast": "^1.4.1", "apollo-upload-client": "^17.0.0", - "axios": "^1.3.3", "base64-blob": "^1.4.1", "bootstrap": "^4.6.2", "classnames": "^2.3.2", @@ -69,6 +68,7 @@ "ua-parser-js": "^1.0.34", "universal-cookie": "^4.0.4", "video.js": "^7.21.3", + "videojs-abloop": "^1.2.0", "videojs-contrib-dash": "^5.1.1", "videojs-mobile-ui": "^0.8.0", "videojs-seek-buttons": "^3.0.1", diff --git a/ui/v2.5/src/@types/videojs-abloop.d.ts b/ui/v2.5/src/@types/videojs-abloop.d.ts new file mode 100644 index 00000000000..b44d9f50c6e --- /dev/null +++ b/ui/v2.5/src/@types/videojs-abloop.d.ts @@ -0,0 +1,35 @@ +/* eslint-disable @typescript-eslint/naming-convention */ + +declare module 
"videojs-abloop" { + import videojs from "video.js"; + + declare function abLoopPlugin( + window: Window & typeof globalThis, + player: videojs + ): abLoopPlugin.Plugin; + + declare namespace abLoopPlugin { + interface Options { + start: number | boolean; + end: number | boolean; + enabled: boolean; + loopIfBeforeStart: boolean; + loopIfAfterEnd: boolean; + pauseBeforeLooping: boolean; + pauseAfterLooping: boolean; + } + + class Plugin extends videojs.Plugin { + getOptions(): Options; + setOptions(o: Options): void; + } + } + + export = abLoopPlugin; + + declare module "video.js" { + interface VideoJsPlayer { + abLoopPlugin: abLoopPlugin.Plugin; + } + } +} diff --git a/ui/v2.5/src/components/FrontPage/Control.tsx b/ui/v2.5/src/components/FrontPage/Control.tsx index c655d9c3eff..3cb2cf02111 100644 --- a/ui/v2.5/src/components/FrontPage/Control.tsx +++ b/ui/v2.5/src/components/FrontPage/Control.tsx @@ -105,11 +105,11 @@ const SavedFilterResults: React.FC = ({ const filter = useMemo(() => { if (!data?.findSavedFilter) return; - const { mode, filter: filterJSON } = data.findSavedFilter; + const { mode } = data.findSavedFilter; const ret = new ListFilterModel(mode, config); ret.currentPage = 1; - ret.configureFromJSON(filterJSON); + ret.configureFromSavedFilter(data.findSavedFilter); ret.randomSeed = -1; return ret; }, [data?.findSavedFilter, config]); diff --git a/ui/v2.5/src/components/Galleries/GalleryCard.tsx b/ui/v2.5/src/components/Galleries/GalleryCard.tsx index 88fe37f2aae..c62b5b7833a 100644 --- a/ui/v2.5/src/components/Galleries/GalleryCard.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryCard.tsx @@ -5,7 +5,7 @@ import * as GQL from "src/core/generated-graphql"; import { GridCard } from "../Shared/GridCard"; import { HoverPopover } from "../Shared/HoverPopover"; import { Icon } from "../Shared/Icon"; -import { TagLink } from "../Shared/TagLink"; +import { SceneLink, TagLink } from "../Shared/TagLink"; import { TruncatedText } from "../Shared/TruncatedText"; 
import { PerformerPopoverButton } from "../Shared/PerformerPopoverButton"; import { PopoverCountButton } from "../Shared/PopoverCountButton"; @@ -31,7 +31,7 @@ export const GalleryCard: React.FC = (props) => { if (props.gallery.scenes.length === 0) return; const popoverContent = props.gallery.scenes.map((scene) => ( - + )); return ( @@ -52,7 +52,7 @@ export const GalleryCard: React.FC = (props) => { if (props.gallery.tags.length <= 0) return; const popoverContent = props.gallery.tags.map((tag) => ( - + )); return ( diff --git a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryAddPanel.tsx b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryAddPanel.tsx index b423b11048c..e007f2f1f0f 100644 --- a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryAddPanel.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryAddPanel.tsx @@ -30,7 +30,7 @@ export const GalleryAddPanel: React.FC = ({ // if galleries is already present, then we modify it, otherwise add let galleryCriterion = filter.criteria.find((c) => { return c.criterionOption.type === "galleries"; - }) as GalleriesCriterion; + }) as GalleriesCriterion | undefined; if ( galleryCriterion && diff --git a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryDetailPanel.tsx b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryDetailPanel.tsx index 463ced50611..83ffe2bc3d3 100644 --- a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryDetailPanel.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryDetailPanel.tsx @@ -34,7 +34,7 @@ export const GalleryDetailPanel: React.FC = ({ function renderTags() { if (gallery.tags.length === 0) return; const tags = gallery.tags.map((tag) => ( - + )); return ( <> diff --git a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryEditPanel.tsx b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryEditPanel.tsx index c0d037661f3..1701b5bc7e1 100644 --- a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryEditPanel.tsx +++ 
b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryEditPanel.tsx @@ -19,7 +19,6 @@ import { mutateReloadScrapers, } from "src/core/StashService"; import { - PerformerSelect, TagSelect, SceneSelect, StudioSelect, @@ -39,6 +38,10 @@ import { ConfigurationContext } from "src/hooks/Config"; import isEqual from "lodash-es/isEqual"; import { DateInput } from "src/components/Shared/DateInput"; import { handleUnsavedChanges } from "src/utils/navigation"; +import { + Performer, + PerformerSelect, +} from "src/components/Performers/PerformerSelect"; interface IProps { gallery: Partial; @@ -62,6 +65,8 @@ export const GalleryEditPanel: React.FC = ({ })) ); + const [performers, setPerformers] = useState([]); + const isNew = gallery.id === undefined; const { configuration: stashConfig } = React.useContext(ConfigurationContext); @@ -139,12 +144,24 @@ export const GalleryEditPanel: React.FC = ({ ); } + function onSetPerformers(items: Performer[]) { + setPerformers(items); + formik.setFieldValue( + "performer_ids", + items.map((item) => item.id) + ); + } + useRatingKeybinds( isVisible, stashConfig?.ui?.ratingSystemOptions?.type, setRating ); + useEffect(() => { + setPerformers(gallery.performers ?? []); + }, [gallery.performers]); + useEffect(() => { if (isVisible) { Mousetrap.bind("s s", () => { @@ -238,6 +255,7 @@ export const GalleryEditPanel: React.FC = ({ return ( { onScrapeDialogClosed(data); @@ -309,8 +327,15 @@ export const GalleryEditPanel: React.FC = ({ }); if (idPerfs.length > 0) { - const newIds = idPerfs.map((p) => p.stored_id); - formik.setFieldValue("performer_ids", newIds as string[]); + onSetPerformers( + idPerfs.map((p) => { + return { + id: p.stored_id!, + name: p.name ?? 
"", + alias_list: [], + }; + }) + ); } } @@ -472,13 +497,8 @@ export const GalleryEditPanel: React.FC = ({ - formik.setFieldValue( - "performer_ids", - items.map((item) => item.id) - ) - } - ids={formik.values.performer_ids} + onSelect={onSetPerformers} + values={performers} /> diff --git a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryImagesPanel.tsx b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryImagesPanel.tsx index 18741102089..eefc92ee8b4 100644 --- a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryImagesPanel.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryImagesPanel.tsx @@ -33,7 +33,7 @@ export const GalleryImagesPanel: React.FC = ({ // if galleries is already present, then we modify it, otherwise add let galleryCriterion = filter.criteria.find((c) => { return c.criterionOption.type === "galleries"; - }) as GalleriesCriterion; + }) as GalleriesCriterion | undefined; if ( galleryCriterion && diff --git a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryScrapeDialog.tsx b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryScrapeDialog.tsx index 520f5bf4746..e1f1a42468f 100644 --- a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryScrapeDialog.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryScrapeDialog.tsx @@ -1,180 +1,32 @@ import React, { useState } from "react"; -import { FormattedMessage, useIntl } from "react-intl"; -import { - StudioSelect, - PerformerSelect, - TagSelect, -} from "src/components/Shared/Select"; +import { useIntl } from "react-intl"; import * as GQL from "src/core/generated-graphql"; import { ScrapeDialog, - ScrapeDialogRow, - ScrapeResult, ScrapedInputGroupRow, ScrapedTextAreaRow, -} from "src/components/Shared/ScrapeDialog"; +} from "src/components/Shared/ScrapeDialog/ScrapeDialog"; import clone from "lodash-es/clone"; import { - useStudioCreate, - usePerformerCreate, - useTagCreate, -} from "src/core/StashService"; -import { useToast } from "src/hooks/Toast"; 
-import { scrapedPerformerToCreateInput } from "src/core/performers"; - -function renderScrapedStudio( - result: ScrapeResult, - isNew?: boolean, - onChange?: (value: string) => void -) { - const resultValue = isNew ? result.newValue : result.originalValue; - const value = resultValue ? [resultValue] : []; - - return ( - { - if (onChange) { - onChange(items[0]?.id); - } - }} - ids={value} - /> - ); -} - -function renderScrapedStudioRow( - title: string, - result: ScrapeResult, - onChange: (value: ScrapeResult) => void, - newStudio?: GQL.ScrapedStudio, - onCreateNew?: (value: GQL.ScrapedStudio) => void -) { - return ( - renderScrapedStudio(result)} - renderNewField={() => - renderScrapedStudio(result, true, (value) => - onChange(result.cloneWithValue(value)) - ) - } - onChange={onChange} - newValues={newStudio ? [newStudio] : undefined} - onCreateNew={() => { - if (onCreateNew && newStudio) onCreateNew(newStudio); - }} - /> - ); -} - -function renderScrapedPerformers( - result: ScrapeResult, - isNew?: boolean, - onChange?: (value: string[]) => void -) { - const resultValue = isNew ? result.newValue : result.originalValue; - const value = resultValue ?? []; - - return ( - { - if (onChange) { - onChange(items.map((i) => i.id)); - } - }} - ids={value} - /> - ); -} - -function renderScrapedPerformersRow( - title: string, - result: ScrapeResult, - onChange: (value: ScrapeResult) => void, - newPerformers: GQL.ScrapedPerformer[], - onCreateNew?: (value: GQL.ScrapedPerformer) => void -) { - const performersCopy = newPerformers.map((p) => { - const name: string = p.name ?? 
""; - return { ...p, name }; - }); - - return ( - renderScrapedPerformers(result)} - renderNewField={() => - renderScrapedPerformers(result, true, (value) => - onChange(result.cloneWithValue(value)) - ) - } - onChange={onChange} - newValues={performersCopy} - onCreateNew={(i) => { - if (onCreateNew) onCreateNew(newPerformers[i]); - }} - /> - ); -} - -function renderScrapedTags( - result: ScrapeResult, - isNew?: boolean, - onChange?: (value: string[]) => void -) { - const resultValue = isNew ? result.newValue : result.originalValue; - const value = resultValue ?? []; - - return ( - { - if (onChange) { - onChange(items.map((i) => i.id)); - } - }} - ids={value} - /> - ); -} - -function renderScrapedTagsRow( - title: string, - result: ScrapeResult, - onChange: (value: ScrapeResult) => void, - newTags: GQL.ScrapedTag[], - onCreateNew?: (value: GQL.ScrapedTag) => void -) { - return ( - renderScrapedTags(result)} - renderNewField={() => - renderScrapedTags(result, true, (value) => - onChange(result.cloneWithValue(value)) - ) - } - newValues={newTags} - onChange={onChange} - onCreateNew={(i) => { - if (onCreateNew) onCreateNew(newTags[i]); - }} - /> - ); -} + ObjectListScrapeResult, + ScrapeResult, +} from "src/components/Shared/ScrapeDialog/scrapeResult"; +import { + ScrapedPerformersRow, + ScrapedStudioRow, + ScrapedTagsRow, +} from "src/components/Shared/ScrapeDialog/ScrapedObjectsRow"; +import { sortStoredIdObjects } from "src/utils/data"; +import { Performer } from "src/components/Performers/PerformerSelect"; +import { + useCreateScrapedPerformer, + useCreateScrapedStudio, + useCreateScrapedTag, +} from "src/components/Shared/ScrapeDialog/createObjects"; interface IGalleryScrapeDialogProps { gallery: Partial; + galleryPerformers: Performer[]; scraped: GQL.ScrapedGallery; onClose: (scrapedGallery?: GQL.ScrapedGallery) => void; @@ -247,10 +99,17 @@ export const GalleryScrapeDialog: React.FC = ( return ret; } - const [performers, setPerformers] = useState>( - new 
ScrapeResult( - sortIdList(props.gallery.performer_ids), - mapStoredIdObjects(props.scraped.performers ?? undefined) + const [performers, setPerformers] = useState< + ObjectListScrapeResult + >( + new ObjectListScrapeResult( + sortStoredIdObjects( + props.galleryPerformers.map((p) => ({ + stored_id: p.id, + name: p.name, + })) + ), + sortStoredIdObjects(props.scraped.performers ?? undefined) ) ); const [newPerformers, setNewPerformers] = useState( @@ -271,11 +130,25 @@ export const GalleryScrapeDialog: React.FC = ( new ScrapeResult(props.gallery.details, props.scraped.details) ); - const [createStudio] = useStudioCreate(); - const [createPerformer] = usePerformerCreate(); - const [createTag] = useTagCreate(); + const createNewStudio = useCreateScrapedStudio({ + scrapeResult: studio, + setScrapeResult: setStudio, + setNewObject: setNewStudio, + }); + + const createNewPerformer = useCreateScrapedPerformer({ + scrapeResult: performers, + setScrapeResult: setPerformers, + newObjects: newPerformers, + setNewObjects: setNewPerformers, + }); - const Toast = useToast(); + const createNewTag = useCreateScrapedTag({ + scrapeResult: tags, + setScrapeResult: setTags, + newObjects: newTags, + setNewObjects: setNewTags, + }); // don't show the dialog if nothing was scraped if ( @@ -290,122 +163,6 @@ export const GalleryScrapeDialog: React.FC = ( return <>; } - async function createNewStudio(toCreate: GQL.ScrapedStudio) { - try { - const result = await createStudio({ - variables: { - input: { - name: toCreate.name, - url: toCreate.url, - }, - }, - }); - - // set the new studio as the value - setStudio(studio.cloneWithValue(result.data!.studioCreate!.id)); - setNewStudio(undefined); - - Toast.success({ - content: ( - - {toCreate.name}, - }} - /> - - ), - }); - } catch (e) { - Toast.error(e); - } - } - - async function createNewPerformer(toCreate: GQL.ScrapedPerformer) { - const input = scrapedPerformerToCreateInput(toCreate); - - try { - const result = await createPerformer({ - 
variables: { input }, - }); - - // add the new performer to the new performers value - const performerClone = performers.cloneWithValue(performers.newValue); - if (!performerClone.newValue) { - performerClone.newValue = []; - } - performerClone.newValue.push(result.data!.performerCreate!.id); - setPerformers(performerClone); - - // remove the performer from the list - const newPerformersClone = newPerformers.concat(); - const pIndex = newPerformersClone.indexOf(toCreate); - newPerformersClone.splice(pIndex, 1); - - setNewPerformers(newPerformersClone); - - Toast.success({ - content: ( - - {toCreate.name}, - }} - /> - - ), - }); - } catch (e) { - Toast.error(e); - } - } - - async function createNewTag(toCreate: GQL.ScrapedTag) { - const tagInput: GQL.TagCreateInput = { name: toCreate.name ?? "" }; - try { - const result = await createTag({ - variables: { - input: tagInput, - }, - }); - - // add the new tag to the new tags value - const tagClone = tags.cloneWithValue(tags.newValue); - if (!tagClone.newValue) { - tagClone.newValue = []; - } - tagClone.newValue.push(result.data!.tagCreate!.id); - setTags(tagClone); - - // remove the tag from the list - const newTagsClone = newTags.concat(); - const pIndex = newTagsClone.indexOf(toCreate); - newTagsClone.splice(pIndex, 1); - - setNewTags(newTagsClone); - - Toast.success({ - content: ( - - {toCreate.name}, - }} - /> - - ), - }); - } catch (e) { - Toast.error(e); - } - } - function makeNewScrapedItem(): GQL.ScrapedGalleryDataFragment { const newStudioValue = studio.getNewValue(); @@ -419,12 +176,7 @@ export const GalleryScrapeDialog: React.FC = ( name: "", } : undefined, - performers: performers.getNewValue()?.map((p) => { - return { - stored_id: p, - name: "", - }; - }), + performers: performers.getNewValue(), tags: tags.getNewValue()?.map((m) => { return { stored_id: m, @@ -454,27 +206,27 @@ export const GalleryScrapeDialog: React.FC = ( result={date} onChange={(value) => setDate(value)} /> - {renderScrapedStudioRow( - 
intl.formatMessage({ id: "studios" }), - studio, - (value) => setStudio(value), - newStudio, - createNewStudio - )} - {renderScrapedPerformersRow( - intl.formatMessage({ id: "performers" }), - performers, - (value) => setPerformers(value), - newPerformers, - createNewPerformer - )} - {renderScrapedTagsRow( - intl.formatMessage({ id: "tags" }), - tags, - (value) => setTags(value), - newTags, - createNewTag - )} + setStudio(value)} + newStudio={newStudio} + onCreateNew={createNewStudio} + /> + setPerformers(value)} + newObjects={newPerformers} + onCreateNew={createNewPerformer} + /> + setTags(value)} + newObjects={newTags} + onCreateNew={createNewTag} + /> = ( if (props.image.tags.length <= 0) return; const popoverContent = props.image.tags.map((tag) => ( - + )); return ( @@ -83,7 +83,7 @@ export const ImageCard: React.FC = ( if (props.image.galleries.length <= 0) return; const popoverContent = props.image.galleries.map((gallery) => ( - + )); return ( diff --git a/ui/v2.5/src/components/Images/ImageDetails/ImageDetailPanel.tsx b/ui/v2.5/src/components/Images/ImageDetails/ImageDetailPanel.tsx index c4e840e2cbb..417d425cc7b 100644 --- a/ui/v2.5/src/components/Images/ImageDetails/ImageDetailPanel.tsx +++ b/ui/v2.5/src/components/Images/ImageDetails/ImageDetailPanel.tsx @@ -2,7 +2,7 @@ import React, { useMemo } from "react"; import { Link } from "react-router-dom"; import * as GQL from "src/core/generated-graphql"; import TextUtils from "src/utils/text"; -import { TagLink } from "src/components/Shared/TagLink"; +import { GalleryLink, TagLink } from "src/components/Shared/TagLink"; import { TruncatedText } from "src/components/Shared/TruncatedText"; import { PerformerCard } from "src/components/Performers/PerformerCard"; import { RatingSystem } from "src/components/Shared/Rating/RatingSystem"; @@ -24,7 +24,7 @@ export const ImageDetailPanel: React.FC = (props) => { function renderTags() { if (props.image.tags.length === 0) return; const tags = props.image.tags.map((tag) 
=> ( - + )); return ( <> @@ -67,8 +67,8 @@ export const ImageDetailPanel: React.FC = (props) => { function renderGalleries() { if (props.image.galleries.length === 0) return; - const tags = props.image.galleries.map((gallery) => ( - + const galleries = props.image.galleries.map((gallery) => ( + )); return ( <> @@ -78,7 +78,7 @@ export const ImageDetailPanel: React.FC = (props) => { values={{ count: props.image.galleries.length }} /> - {tags} + {galleries} ); } diff --git a/ui/v2.5/src/components/Images/ImageDetails/ImageEditPanel.tsx b/ui/v2.5/src/components/Images/ImageDetails/ImageEditPanel.tsx index 23e1a899679..8ff5fb6955e 100644 --- a/ui/v2.5/src/components/Images/ImageDetails/ImageEditPanel.tsx +++ b/ui/v2.5/src/components/Images/ImageDetails/ImageEditPanel.tsx @@ -4,13 +4,9 @@ import { FormattedMessage, useIntl } from "react-intl"; import Mousetrap from "mousetrap"; import * as GQL from "src/core/generated-graphql"; import * as yup from "yup"; -import { - PerformerSelect, - TagSelect, - StudioSelect, -} from "src/components/Shared/Select"; +import { TagSelect, StudioSelect } from "src/components/Shared/Select"; import { LoadingIndicator } from "src/components/Shared/LoadingIndicator"; -import { URLField } from "src/components/Shared/URLField"; +import { URLListInput } from "src/components/Shared/URLField"; import { useToast } from "src/hooks/Toast"; import FormUtils from "src/utils/form"; import { useFormik } from "formik"; @@ -20,6 +16,11 @@ import { useRatingKeybinds } from "src/hooks/keybinds"; import { ConfigurationContext } from "src/hooks/Config"; import isEqual from "lodash-es/isEqual"; import { DateInput } from "src/components/Shared/DateInput"; +import { yupDateString, yupUniqueStringList } from "src/utils/yup"; +import { + Performer, + PerformerSelect, +} from "src/components/Performers/PerformerSelect"; interface IProps { image: GQL.ImageDataFragment; @@ -42,22 +43,12 @@ export const ImageEditPanel: React.FC = ({ const { configuration } = 
React.useContext(ConfigurationContext); + const [performers, setPerformers] = useState([]); + const schema = yup.object({ title: yup.string().ensure(), - url: yup.string().ensure(), - date: yup - .string() - .ensure() - .test({ - name: "date", - test: (value) => { - if (!value) return true; - if (!value.match(/^\d{4}-\d{2}-\d{2}$/)) return false; - if (Number.isNaN(Date.parse(value))) return false; - return true; - }, - message: intl.formatMessage({ id: "validation.date_invalid_form" }), - }), + urls: yupUniqueStringList("urls"), + date: yupDateString(intl), rating100: yup.number().nullable().defined(), studio_id: yup.string().required().nullable(), performer_ids: yup.array(yup.string().required()).defined(), @@ -66,7 +57,7 @@ export const ImageEditPanel: React.FC = ({ const initialValues = { title: image.title ?? "", - url: image?.url ?? "", + urls: image?.urls ?? [], date: image?.date ?? "", rating100: image.rating100 ?? null, studio_id: image.studio?.id ?? null, @@ -87,12 +78,24 @@ export const ImageEditPanel: React.FC = ({ formik.setFieldValue("rating100", v); } + function onSetPerformers(items: Performer[]) { + setPerformers(items); + formik.setFieldValue( + "performer_ids", + items.map((item) => item.id) + ); + } + useRatingKeybinds( true, configuration?.ui?.ratingSystemOptions?.type, setRating ); + useEffect(() => { + setPerformers(image.performers ?? []); + }, [image.performers]); + useEffect(() => { if (isVisible) { Mousetrap.bind("s s", () => { @@ -148,6 +151,14 @@ export const ImageEditPanel: React.FC = ({ if (isLoading) return ; + const urlsErrors = Array.isArray(formik.errors.urls) + ? formik.errors.urls[0] + : formik.errors.urls; + const urlsErrorMsg = urlsErrors + ? intl.formatMessage({ id: "validation.urls_must_be_unique" }) + : undefined; + const urlsErrorIdx = urlsErrors?.split(" ").map((e) => parseInt(e)); + return (
= ({
{renderTextField("title", intl.formatMessage({ id: "title" }))} - + - + - {}} - urlScrapable={() => { - return false; - }} - isInvalid={!!formik.getFieldMeta("url").error} + formik.setFieldValue("urls", value)} + errors={urlsErrorMsg} + errorIdx={urlsErrorIdx} /> @@ -249,13 +258,8 @@ export const ImageEditPanel: React.FC = ({ - formik.setFieldValue( - "performer_ids", - items.map((item) => item.id) - ) - } - ids={formik.values.performer_ids} + onSelect={onSetPerformers} + values={performers} /> diff --git a/ui/v2.5/src/components/Images/ImageDetails/ImageFileInfoPanel.tsx b/ui/v2.5/src/components/Images/ImageDetails/ImageFileInfoPanel.tsx index 2b906c6d5ef..adf95d2f9f2 100644 --- a/ui/v2.5/src/components/Images/ImageDetails/ImageFileInfoPanel.tsx +++ b/ui/v2.5/src/components/Images/ImageDetails/ImageFileInfoPanel.tsx @@ -7,7 +7,7 @@ import * as GQL from "src/core/generated-graphql"; import { mutateImageSetPrimaryFile } from "src/core/StashService"; import { useToast } from "src/hooks/Toast"; import TextUtils from "src/utils/text"; -import { TextField, URLField } from "src/utils/field"; +import { TextField, URLField, URLsField } from "src/utils/field"; interface IFileInfoPanelProps { file: GQL.ImageFileDataFragment | GQL.VideoFileDataFragment; @@ -120,20 +120,11 @@ export const ImageFileInfoPanel: React.FC = ( if (props.image.visual_files.length === 1) { return ( <> - +
+ +
- {props.image.url ? ( -
- -
- ) : ( - "" - )} + ); } diff --git a/ui/v2.5/src/components/Images/ImageWallItem.tsx b/ui/v2.5/src/components/Images/ImageWallItem.tsx index f1d2856da52..8403b3a98da 100644 --- a/ui/v2.5/src/components/Images/ImageWallItem.tsx +++ b/ui/v2.5/src/components/Images/ImageWallItem.tsx @@ -44,6 +44,7 @@ export const ImageWallItem: React.FC = ( return ( ; @@ -175,7 +175,7 @@ const GenericCriterionEditor: React.FC = ({ ); } } - if (criterion.criterionOption instanceof PathCriterionOption) { + if (criterion instanceof PathCriterion) { return ( ); diff --git a/ui/v2.5/src/components/List/EditFilterDialog.tsx b/ui/v2.5/src/components/List/EditFilterDialog.tsx index 581fd31fb87..531af632cb3 100644 --- a/ui/v2.5/src/components/List/EditFilterDialog.tsx +++ b/ui/v2.5/src/components/List/EditFilterDialog.tsx @@ -14,7 +14,6 @@ import { Criterion, CriterionOption, } from "src/models/list-filter/criteria/criterion"; -import { makeCriteria } from "src/models/list-filter/criteria/factory"; import { FormattedMessage, useIntl } from "react-intl"; import { ConfigurationContext } from "src/hooks/Config"; import { ListFilterModel } from "src/models/list-filter/filter"; @@ -243,17 +242,11 @@ export const EditFilterDialog: React.FC = ({ }, [currentFilter.mode]); const criterionOptions = useMemo(() => { - const filteredOptions = filterOptions.criterionOptions.filter((o) => { - return o.type !== "none"; - }); - - filteredOptions.sort((a, b) => { + return [...filterOptions.criterionOptions].sort((a, b) => { return intl .formatMessage({ id: a.messageID }) .localeCompare(intl.formatMessage({ id: b.messageID })); }); - - return filteredOptions; }, [intl, filterOptions.criterionOptions]); const optionSelected = useCallback( @@ -270,11 +263,11 @@ export const EditFilterDialog: React.FC = ({ if (existing) { setCriterion(existing); } else { - const newCriterion = makeCriteria(configuration, option.type); + const newCriterion = filter.makeCriterion(option.type); setCriterion(newCriterion); } }, - 
[criteria, configuration] + [filter, criteria] ); const ui = (configuration?.ui ?? {}) as IUIConfig; diff --git a/ui/v2.5/src/components/List/Filters/DurationFilter.tsx b/ui/v2.5/src/components/List/Filters/DurationFilter.tsx index 772bb0137a4..59d8f0a0700 100644 --- a/ui/v2.5/src/components/List/Filters/DurationFilter.tsx +++ b/ui/v2.5/src/components/List/Filters/DurationFilter.tsx @@ -17,67 +17,50 @@ export const DurationFilter: React.FC = ({ }) => { const intl = useIntl(); - function onChanged(valueAsNumber: number, property: "value" | "value2") { + function onChanged(v: number | undefined, property: "value" | "value2") { const { value } = criterion; - value[property] = valueAsNumber; + value[property] = v; onValueChanged(value); } - let equalsControl: JSX.Element | null = null; - if ( - criterion.modifier === CriterionModifier.Equals || - criterion.modifier === CriterionModifier.NotEquals - ) { - equalsControl = ( - - onChanged(v, "value")} - placeholder={intl.formatMessage({ id: "criterion.value" })} - /> - - ); - } + function renderTop() { + let placeholder: string; + if ( + criterion.modifier === CriterionModifier.GreaterThan || + criterion.modifier === CriterionModifier.Between || + criterion.modifier === CriterionModifier.NotBetween + ) { + placeholder = intl.formatMessage({ id: "criterion.greater_than" }); + } else if (criterion.modifier === CriterionModifier.LessThan) { + placeholder = intl.formatMessage({ id: "criterion.less_than" }); + } else { + placeholder = intl.formatMessage({ id: "criterion.value" }); + } - let lowerControl: JSX.Element | null = null; - if ( - criterion.modifier === CriterionModifier.GreaterThan || - criterion.modifier === CriterionModifier.Between || - criterion.modifier === CriterionModifier.NotBetween - ) { - lowerControl = ( + return ( onChanged(v, "value")} - placeholder={intl.formatMessage({ id: "criterion.greater_than" })} + value={criterion.value?.value} + setValue={(v) => onChanged(v, "value")} + placeholder={placeholder} 
/> ); } - let upperControl: JSX.Element | null = null; - if ( - criterion.modifier === CriterionModifier.LessThan || - criterion.modifier === CriterionModifier.Between || - criterion.modifier === CriterionModifier.NotBetween - ) { - upperControl = ( + function renderBottom() { + if ( + criterion.modifier !== CriterionModifier.Between && + criterion.modifier !== CriterionModifier.NotBetween + ) { + return; + } + + return ( - onChanged( - v, - criterion.modifier === CriterionModifier.LessThan - ? "value" - : "value2" - ) - } + value={criterion.value?.value2} + setValue={(v) => onChanged(v, "value2")} placeholder={intl.formatMessage({ id: "criterion.less_than" })} /> @@ -86,9 +69,8 @@ export const DurationFilter: React.FC = ({ return ( <> - {equalsControl} - {lowerControl} - {upperControl} + {renderTop()} + {renderBottom()} ); }; diff --git a/ui/v2.5/src/components/List/Filters/HierarchicalLabelValueFilter.tsx b/ui/v2.5/src/components/List/Filters/HierarchicalLabelValueFilter.tsx index bb262583881..abfb74ee78e 100644 --- a/ui/v2.5/src/components/List/Filters/HierarchicalLabelValueFilter.tsx +++ b/ui/v2.5/src/components/List/Filters/HierarchicalLabelValueFilter.tsx @@ -13,20 +13,19 @@ interface IHierarchicalLabelValueFilterProps { export const HierarchicalLabelValueFilter: React.FC< IHierarchicalLabelValueFilterProps > = ({ criterion, onValueChanged }) => { + const { criterionOption } = criterion; + const { type, inputType } = criterionOption; + const intl = useIntl(); if ( - criterion.criterionOption.type !== "performers" && - criterion.criterionOption.type !== "studios" && - criterion.criterionOption.type !== "parent_studios" && - criterion.criterionOption.type !== "tags" && - criterion.criterionOption.type !== "sceneTags" && - criterion.criterionOption.type !== "performerTags" && - criterion.criterionOption.type !== "parentTags" && - criterion.criterionOption.type !== "childTags" && - criterion.criterionOption.type !== "movies" - ) + inputType !== "studios" && + 
inputType !== "tags" && + inputType !== "scene_tags" && + inputType !== "performer_tags" + ) { return null; + } const messages = defineMessages({ studio_depth: { @@ -51,10 +50,10 @@ export const HierarchicalLabelValueFilter: React.FC< } function criterionOptionTypeToIncludeID(): string { - if (criterion.criterionOption.type === "studios") { + if (inputType === "studios") { return "include-sub-studios"; } - if (criterion.criterionOption.type === "childTags") { + if (type === "children") { return "include-parent-tags"; } return "include-sub-tags"; @@ -62,9 +61,9 @@ export const HierarchicalLabelValueFilter: React.FC< function criterionOptionTypeToIncludeUIString(): MessageDescriptor { const optionType = - criterion.criterionOption.type === "studios" + inputType === "studios" ? "include_sub_studios" - : criterion.criterionOption.type === "childTags" + : type === "children" ? "include_parent_tags" : "include_sub_tags"; return { @@ -76,7 +75,7 @@ export const HierarchicalLabelValueFilter: React.FC< <> labeled.id)} diff --git a/ui/v2.5/src/components/List/Filters/LabeledIdFilter.tsx b/ui/v2.5/src/components/List/Filters/LabeledIdFilter.tsx index f06e5c21bdc..13824e08b8b 100644 --- a/ui/v2.5/src/components/List/Filters/LabeledIdFilter.tsx +++ b/ui/v2.5/src/components/List/Filters/LabeledIdFilter.tsx @@ -13,18 +13,19 @@ export const LabeledIdFilter: React.FC = ({ criterion, onValueChanged, }) => { + const { criterionOption } = criterion; + const { inputType } = criterionOption; + if ( - criterion.criterionOption.type !== "performers" && - criterion.criterionOption.type !== "studios" && - criterion.criterionOption.type !== "parent_studios" && - criterion.criterionOption.type !== "tags" && - criterion.criterionOption.type !== "sceneTags" && - criterion.criterionOption.type !== "performerTags" && - criterion.criterionOption.type !== "parentTags" && - criterion.criterionOption.type !== "childTags" && - criterion.criterionOption.type !== "movies" - ) + inputType !== 
"performers" && + inputType !== "studios" && + inputType !== "scene_tags" && + inputType !== "performer_tags" && + inputType !== "tags" && + inputType !== "movies" + ) { return null; + } function onSelectionChanged(items: SelectObject[]) { onValueChanged( @@ -38,7 +39,7 @@ export const LabeledIdFilter: React.FC = ({ return ( labeled.id)} diff --git a/ui/v2.5/src/components/List/Filters/SelectableFilter.tsx b/ui/v2.5/src/components/List/Filters/SelectableFilter.tsx index 2c13eb57e81..3ce31bec761 100644 --- a/ui/v2.5/src/components/List/Filters/SelectableFilter.tsx +++ b/ui/v2.5/src/components/List/Filters/SelectableFilter.tsx @@ -22,7 +22,7 @@ import { import { defineMessages, MessageDescriptor, useIntl } from "react-intl"; import { CriterionModifier } from "src/core/generated-graphql"; import { keyboardClickHandler } from "src/utils/keyboard"; -import { useDebouncedSetState } from "src/hooks/debounce"; +import { useDebounce } from "src/hooks/debounce"; import useFocus from "src/utils/focus"; interface ISelectedItem { @@ -192,7 +192,7 @@ export const ObjectsFilter = < const [query, setQuery] = useState(""); const [displayQuery, setDisplayQuery] = useState(query); - const debouncedSetQuery = useDebouncedSetState(setQuery, 250); + const debouncedSetQuery = useDebounce(setQuery, 250); const onQueryChange = useCallback( (input: string) => { setDisplayQuery(input); @@ -320,7 +320,7 @@ export const HierarchicalObjectsFilter = < if (criterion.criterionOption.type === "studios") { return "include-sub-studios"; } - if (criterion.criterionOption.type === "childTags") { + if (criterion.criterionOption.type === "children") { return "include-parent-tags"; } return "include-sub-tags"; @@ -330,7 +330,7 @@ export const HierarchicalObjectsFilter = < const optionType = criterion.criterionOption.type === "studios" ? "include_sub_studios" - : criterion.criterionOption.type === "childTags" + : criterion.criterionOption.type === "children" ? 
"include_parent_tags" : "include_sub_tags"; return { diff --git a/ui/v2.5/src/components/List/ItemList.tsx b/ui/v2.5/src/components/List/ItemList.tsx index e8cabe7cec1..8b3aa5898b1 100644 --- a/ui/v2.5/src/components/List/ItemList.tsx +++ b/ui/v2.5/src/components/List/ItemList.tsx @@ -619,8 +619,8 @@ export function makeItemList({ if (defaultFilter?.findDefaultFilter) { newFilter.currentPage = 1; try { - newFilter.configureFromJSON( - defaultFilter.findDefaultFilter.filter + newFilter.configureFromSavedFilter( + defaultFilter.findDefaultFilter ); } catch (err) { console.log(err); diff --git a/ui/v2.5/src/components/List/ListFilter.tsx b/ui/v2.5/src/components/List/ListFilter.tsx index 93b227828ad..cb692f96dfa 100644 --- a/ui/v2.5/src/components/List/ListFilter.tsx +++ b/ui/v2.5/src/components/List/ListFilter.tsx @@ -75,16 +75,12 @@ export const ListFilter: React.FC = ({ [filter, onFilterUpdate] ); - const searchCallback = useDebounce( - (value: string) => { - const newFilter = cloneDeep(filter); - newFilter.searchTerm = value; - newFilter.currentPage = 1; - onFilterUpdate(newFilter); - }, - [filter, onFilterUpdate], - 500 - ); + const searchCallback = useDebounce((value: string) => { + const newFilter = cloneDeep(filter); + newFilter.searchTerm = value; + newFilter.currentPage = 1; + onFilterUpdate(newFilter); + }, 500); const intl = useIntl(); diff --git a/ui/v2.5/src/components/List/SavedFilterList.tsx b/ui/v2.5/src/components/List/SavedFilterList.tsx index 8a5da04735e..caa1277d6b2 100644 --- a/ui/v2.5/src/components/List/SavedFilterList.tsx +++ b/ui/v2.5/src/components/List/SavedFilterList.tsx @@ -75,7 +75,9 @@ export const SavedFilterList: React.FC = ({ id, mode: filter.mode, name, - filter: filterCopy.makeSavedFilterJSON(), + find_filter: filterCopy.makeFindFilter(), + object_filter: filterCopy.makeSavedFindFilter(), + ui_options: filterCopy.makeUIOptions(), }, }, }); @@ -143,7 +145,9 @@ export const SavedFilterList: React.FC = ({ variables: { input: { mode: 
filter.mode, - filter: filterCopy.makeSavedFilterJSON(), + find_filter: filterCopy.makeFindFilter(), + object_filter: filterCopy.makeSavedFindFilter(), + ui_options: filterCopy.makeUIOptions(), }, }, }); @@ -166,7 +170,7 @@ export const SavedFilterList: React.FC = ({ newFilter.currentPage = 1; // #1795 - reset search term if not present in saved filter newFilter.searchTerm = ""; - newFilter.configureFromJSON(f.filter); + newFilter.configureFromSavedFilter(f); // #1507 - reset random seed when loaded newFilter.randomSeed = -1; diff --git a/ui/v2.5/src/components/Movies/MovieCard.tsx b/ui/v2.5/src/components/Movies/MovieCard.tsx index dc10872644b..206b0a8ca79 100644 --- a/ui/v2.5/src/components/Movies/MovieCard.tsx +++ b/ui/v2.5/src/components/Movies/MovieCard.tsx @@ -4,7 +4,7 @@ import * as GQL from "src/core/generated-graphql"; import { GridCard } from "../Shared/GridCard"; import { HoverPopover } from "../Shared/HoverPopover"; import { Icon } from "../Shared/Icon"; -import { TagLink } from "../Shared/TagLink"; +import { SceneLink } from "../Shared/TagLink"; import { TruncatedText } from "../Shared/TruncatedText"; import { FormattedMessage } from "react-intl"; import { RatingBanner } from "../Shared/RatingBanner"; @@ -36,7 +36,7 @@ export const MovieCard: React.FC = (props: IProps) => { if (props.movie.scenes.length === 0) return; const popoverContent = props.movie.scenes.map((scene) => ( - + )); return ( diff --git a/ui/v2.5/src/components/Movies/MovieDetails/MovieEditPanel.tsx b/ui/v2.5/src/components/Movies/MovieDetails/MovieEditPanel.tsx index 60717ad28ae..d4fd4c93913 100644 --- a/ui/v2.5/src/components/Movies/MovieDetails/MovieEditPanel.tsx +++ b/ui/v2.5/src/components/Movies/MovieDetails/MovieEditPanel.tsx @@ -135,10 +135,10 @@ export const MovieEditPanel: React.FC = ({ } if (state.duration) { - formik.setFieldValue( - "duration", - DurationUtils.stringToSeconds(state.duration) - ); + const seconds = DurationUtils.stringToSeconds(state.duration); + if 
(seconds !== undefined) { + formik.setFieldValue("duration", seconds); + } } if (state.date) { @@ -402,10 +402,8 @@ export const MovieEditPanel: React.FC = ({ { - formik.setFieldValue("duration", valueAsNumber ?? null); - }} + value={formik.values.duration ?? undefined} + setValue={(v) => formik.setFieldValue("duration", v ?? null)} /> diff --git a/ui/v2.5/src/components/Movies/MovieDetails/MovieScenesPanel.tsx b/ui/v2.5/src/components/Movies/MovieDetails/MovieScenesPanel.tsx index 22215de5a76..9bfbf8b55e2 100644 --- a/ui/v2.5/src/components/Movies/MovieDetails/MovieScenesPanel.tsx +++ b/ui/v2.5/src/components/Movies/MovieDetails/MovieScenesPanel.tsx @@ -18,7 +18,7 @@ export const MovieScenesPanel: React.FC = ({ // if movie is already present, then we modify it, otherwise add let movieCriterion = filter.criteria.find((c) => { return c.criterionOption.type === "movies"; - }) as MoviesCriterion; + }) as MoviesCriterion | undefined; if ( movieCriterion && diff --git a/ui/v2.5/src/components/Movies/MovieDetails/MovieScrapeDialog.tsx b/ui/v2.5/src/components/Movies/MovieDetails/MovieScrapeDialog.tsx index 37ccec8f637..f11edf6e8c3 100644 --- a/ui/v2.5/src/components/Movies/MovieDetails/MovieScrapeDialog.tsx +++ b/ui/v2.5/src/components/Movies/MovieDetails/MovieScrapeDialog.tsx @@ -3,16 +3,16 @@ import { useIntl } from "react-intl"; import * as GQL from "src/core/generated-graphql"; import { ScrapeDialog, - ScrapeResult, ScrapedInputGroupRow, ScrapedImageRow, ScrapeDialogRow, ScrapedTextAreaRow, -} from "src/components/Shared/ScrapeDialog"; +} from "src/components/Shared/ScrapeDialog/ScrapeDialog"; import { StudioSelect } from "src/components/Shared/Select"; import DurationUtils from "src/utils/duration"; import { useStudioCreate } from "src/core/StashService"; import { useToast } from "src/hooks/Toast"; +import { ScrapeResult } from "src/components/Shared/ScrapeDialog/scrapeResult"; function renderScrapedStudio( result: ScrapeResult, diff --git 
a/ui/v2.5/src/components/Performers/PerformerCard.tsx b/ui/v2.5/src/components/Performers/PerformerCard.tsx index c34b184a5bf..fab6acad865 100644 --- a/ui/v2.5/src/components/Performers/PerformerCard.tsx +++ b/ui/v2.5/src/components/Performers/PerformerCard.tsx @@ -168,7 +168,7 @@ export const PerformerCard: React.FC = ({ if (performer.tags.length <= 0) return; const popoverContent = performer.tags.map((tag) => ( - + )); return ( diff --git a/ui/v2.5/src/components/Performers/PerformerDetails/Performer.tsx b/ui/v2.5/src/components/Performers/PerformerDetails/Performer.tsx index 015789fe173..8d42a6c2a22 100644 --- a/ui/v2.5/src/components/Performers/PerformerDetails/Performer.tsx +++ b/ui/v2.5/src/components/Performers/PerformerDetails/Performer.tsx @@ -57,6 +57,7 @@ interface IPerformerParams { } const validTabs = [ + "default", "scenes", "galleries", "images", @@ -65,7 +66,7 @@ const validTabs = [ ] as const; type TabKey = (typeof validTabs)[number]; -const defaultTab: TabKey = "scenes"; +const defaultTab: TabKey = "default"; function isTabKey(tab: string): tab is TabKey { return validTabs.includes(tab as TabKey); @@ -82,7 +83,7 @@ const PerformerPage: React.FC = ({ performer, tabKey }) => { const abbreviateCounter = uiConfig?.abbreviateCounters ?? false; const enableBackgroundImage = uiConfig?.enablePerformerBackgroundImage ?? false; - const showAllDetails = uiConfig?.showAllDetails ?? false; + const showAllDetails = uiConfig?.showAllDetails ?? true; const compactExpandedDetails = uiConfig?.compactExpandedDetails ?? 
false; const [collapsed, setCollapsed] = useState(!showAllDetails); @@ -117,11 +118,30 @@ const PerformerPage: React.FC = ({ performer, tabKey }) => { const [updatePerformer] = usePerformerUpdate(); const [deletePerformer, { loading: isDestroying }] = usePerformerDestroy(); + const populatedDefaultTab = useMemo(() => { + let ret: TabKey = "scenes"; + if (performer.scene_count == 0) { + if (performer.gallery_count != 0) { + ret = "galleries"; + } else if (performer.image_count != 0) { + ret = "images"; + } else if (performer.movie_count != 0) { + ret = "movies"; + } + } + + return ret; + }, [performer]); + + if (tabKey === defaultTab) { + tabKey = populatedDefaultTab; + } + function setTabKey(newTabKey: string | null) { - if (!newTabKey) newTabKey = defaultTab; + if (!newTabKey || newTabKey === defaultTab) newTabKey = populatedDefaultTab; if (newTabKey === tabKey) return; - if (newTabKey === defaultTab) { + if (newTabKey === populatedDefaultTab) { history.replace(`/performers/${performer.id}`); } else if (isTabKey(newTabKey)) { history.replace(`/performers/${performer.id}/${newTabKey}`); diff --git a/ui/v2.5/src/components/Performers/PerformerDetails/PerformerDetailsPanel.tsx b/ui/v2.5/src/components/Performers/PerformerDetails/PerformerDetailsPanel.tsx index 84faefe6389..b9c9c2855e7 100644 --- a/ui/v2.5/src/components/Performers/PerformerDetails/PerformerDetailsPanel.tsx +++ b/ui/v2.5/src/components/Performers/PerformerDetails/PerformerDetailsPanel.tsx @@ -29,7 +29,7 @@ export const PerformerDetailsPanel: React.FC = ({ return (
    {(performer.tags ?? []).map((tag) => ( - + ))}
); diff --git a/ui/v2.5/src/components/Performers/PerformerDetails/PerformerScrapeDialog.tsx b/ui/v2.5/src/components/Performers/PerformerDetails/PerformerScrapeDialog.tsx index 897bd17dd1f..2baf1d8711f 100644 --- a/ui/v2.5/src/components/Performers/PerformerDetails/PerformerScrapeDialog.tsx +++ b/ui/v2.5/src/components/Performers/PerformerDetails/PerformerScrapeDialog.tsx @@ -3,13 +3,12 @@ import { useIntl } from "react-intl"; import * as GQL from "src/core/generated-graphql"; import { ScrapeDialog, - ScrapeResult, ScrapedInputGroupRow, ScrapedImagesRow, ScrapeDialogRow, ScrapedTextAreaRow, ScrapedCountryRow, -} from "src/components/Shared/ScrapeDialog"; +} from "src/components/Shared/ScrapeDialog/ScrapeDialog"; import { useTagCreate } from "src/core/StashService"; import { Form } from "react-bootstrap"; import { TagSelect } from "src/components/Shared/Select"; @@ -26,6 +25,7 @@ import { stringToCircumcised, } from "src/utils/circumcised"; import { IStashBox } from "./PerformerStashBoxModal"; +import { ScrapeResult } from "src/components/Shared/ScrapeDialog/scrapeResult"; function renderScrapedGender( result: ScrapeResult, diff --git a/ui/v2.5/src/components/Performers/PerformerDetails/PerformerScrapeModal.tsx b/ui/v2.5/src/components/Performers/PerformerDetails/PerformerScrapeModal.tsx index 31e093afc71..95874006e1c 100644 --- a/ui/v2.5/src/components/Performers/PerformerDetails/PerformerScrapeModal.tsx +++ b/ui/v2.5/src/components/Performers/PerformerDetails/PerformerScrapeModal.tsx @@ -6,7 +6,7 @@ import * as GQL from "src/core/generated-graphql"; import { ModalComponent } from "src/components/Shared/Modal"; import { LoadingIndicator } from "src/components/Shared/LoadingIndicator"; import { useScrapePerformerList } from "src/core/StashService"; -import { useDebouncedSetState } from "src/hooks/debounce"; +import { useDebounce } from "src/hooks/debounce"; const CLASSNAME = "PerformerScrapeModal"; const CLASSNAME_LIST = `${CLASSNAME}-list`; @@ -33,7 +33,7 @@ const 
PerformerScrapeModal: React.FC = ({ const performers = data?.scrapeSinglePerformer ?? []; - const onInputChange = useDebouncedSetState(setQuery, 500); + const onInputChange = useDebounce(setQuery, 500); useEffect(() => inputRef.current?.focus(), []); diff --git a/ui/v2.5/src/components/Performers/PerformerDetails/PerformerStashBoxModal.tsx b/ui/v2.5/src/components/Performers/PerformerDetails/PerformerStashBoxModal.tsx index b171e00f663..773cd62d28d 100644 --- a/ui/v2.5/src/components/Performers/PerformerDetails/PerformerStashBoxModal.tsx +++ b/ui/v2.5/src/components/Performers/PerformerDetails/PerformerStashBoxModal.tsx @@ -6,7 +6,7 @@ import * as GQL from "src/core/generated-graphql"; import { ModalComponent } from "src/components/Shared/Modal"; import { LoadingIndicator } from "src/components/Shared/LoadingIndicator"; import { stashboxDisplayName } from "src/utils/stashbox"; -import { useDebouncedSetState } from "src/hooks/debounce"; +import { useDebounce } from "src/hooks/debounce"; import { TruncatedText } from "src/components/Shared/TruncatedText"; import { stringToGender } from "src/utils/gender"; @@ -171,7 +171,7 @@ const PerformerStashBoxModal: React.FC = ({ const performers = data?.scrapeSinglePerformer ?? 
[]; - const onInputChange = useDebouncedSetState(setQuery, 500); + const onInputChange = useDebounce(setQuery, 500); useEffect(() => inputRef.current?.focus(), []); diff --git a/ui/v2.5/src/components/Performers/PerformerSelect.tsx b/ui/v2.5/src/components/Performers/PerformerSelect.tsx new file mode 100644 index 00000000000..c721d652deb --- /dev/null +++ b/ui/v2.5/src/components/Performers/PerformerSelect.tsx @@ -0,0 +1,241 @@ +import React, { useEffect, useState } from "react"; +import { + OptionProps, + components as reactSelectComponents, + MultiValueGenericProps, + SingleValueProps, +} from "react-select"; + +import * as GQL from "src/core/generated-graphql"; +import { + usePerformerCreate, + queryFindPerformersByIDForSelect, + queryFindPerformersForSelect, +} from "src/core/StashService"; +import { ConfigurationContext } from "src/hooks/Config"; +import { useIntl } from "react-intl"; +import { defaultMaxOptionsShown, IUIConfig } from "src/core/config"; +import { ListFilterModel } from "src/models/list-filter/filter"; +import { + FilterSelectComponent, + IFilterIDProps, + IFilterProps, + IFilterValueProps, + Option as SelectOption, +} from "../Shared/FilterSelect"; +import { useCompare } from "src/hooks/state"; + +export type SelectObject = { + id: string; + name?: string | null; + title?: string | null; +}; + +export type Performer = Pick< + GQL.Performer, + "id" | "name" | "alias_list" | "disambiguation" +>; +type Option = SelectOption; + +export const PerformerSelect: React.FC< + IFilterProps & IFilterValueProps +> = (props) => { + const [createPerformer] = usePerformerCreate(); + + const { configuration } = React.useContext(ConfigurationContext); + const intl = useIntl(); + const maxOptionsShown = + (configuration?.ui as IUIConfig).maxOptionsShown ?? defaultMaxOptionsShown; + const defaultCreatable = + !configuration?.interface.disableDropdownCreate.performer ?? 
true; + + async function loadPerformers(input: string): Promise { + const filter = new ListFilterModel(GQL.FilterMode.Performers); + filter.searchTerm = input; + filter.currentPage = 1; + filter.itemsPerPage = maxOptionsShown; + filter.sortBy = "name"; + filter.sortDirection = GQL.SortDirectionEnum.Asc; + const query = await queryFindPerformersForSelect(filter); + return query.data.findPerformers.performers.map((performer) => ({ + value: performer.id, + object: performer, + })); + } + + const PerformerOption: React.FC> = ( + optionProps + ) => { + let thisOptionProps = optionProps; + + const { object } = optionProps.data; + + let { name } = object; + + // if name does not match the input value but an alias does, show the alias + const { inputValue } = optionProps.selectProps; + let alias: string | undefined = ""; + if (!name.toLowerCase().includes(inputValue.toLowerCase())) { + alias = object.alias_list?.find((a) => + a.toLowerCase().includes(inputValue.toLowerCase()) + ); + } + + thisOptionProps = { + ...optionProps, + children: ( + + {name} + {object.disambiguation && ( + {` (${object.disambiguation})`} + )} + {alias && {` (${alias})`}} + + ), + }; + + return ; + }; + + const PerformerMultiValueLabel: React.FC< + MultiValueGenericProps + > = (optionProps) => { + let thisOptionProps = optionProps; + + const { object } = optionProps.data; + + thisOptionProps = { + ...optionProps, + children: object.name, + }; + + return ; + }; + + const PerformerValueLabel: React.FC> = ( + optionProps + ) => { + let thisOptionProps = optionProps; + + const { object } = optionProps.data; + + thisOptionProps = { + ...optionProps, + children: object.name, + }; + + return ; + }; + + const onCreate = async (name: string) => { + const result = await createPerformer({ + variables: { input: { name } }, + }); + return { + value: result.data!.performerCreate!.id, + item: result.data!.performerCreate!, + message: "Created performer", + }; + }; + + const getNamedObject = (id: string, name: 
string) => { + return { + id, + name, + alias_list: [], + }; + }; + + const isValidNewOption = (inputValue: string, options: Performer[]) => { + if (!inputValue) { + return false; + } + + if ( + options.some((o) => { + return ( + o.name.toLowerCase() === inputValue.toLowerCase() || + o.alias_list?.some( + (a) => a.toLowerCase() === inputValue.toLowerCase() + ) + ); + }) + ) { + return false; + } + + return true; + }; + + return ( + + {...props} + loadOptions={loadPerformers} + getNamedObject={getNamedObject} + isValidNewOption={isValidNewOption} + components={{ + Option: PerformerOption, + MultiValueLabel: PerformerMultiValueLabel, + SingleValue: PerformerValueLabel, + }} + isMulti={props.isMulti ?? false} + creatable={props.creatable ?? defaultCreatable} + onCreate={onCreate} + placeholder={ + props.noSelectionString ?? + intl.formatMessage( + { id: "actions.select_entity" }, + { entityType: intl.formatMessage({ id: "performer" }) } + ) + } + /> + ); +}; + +export const PerformerIDSelect: React.FC< + IFilterProps & IFilterIDProps +> = (props) => { + const { ids, onSelect: onSelectValues } = props; + + const [values, setValues] = useState([]); + const idsChanged = useCompare(ids); + + function onSelect(items: Performer[]) { + setValues(items); + onSelectValues?.(items); + } + + async function loadObjectsByID(idsToLoad: string[]): Promise { + const performerIDs = idsToLoad.map((id) => parseInt(id)); + const query = await queryFindPerformersByIDForSelect(performerIDs); + const { performers: loadedPerformers } = query.data.findPerformers; + + return loadedPerformers; + } + + useEffect(() => { + if (!idsChanged) { + return; + } + + if (!ids || ids?.length === 0) { + setValues([]); + return; + } + + // load the values if we have ids and they haven't been loaded yet + const filteredValues = values.filter((v) => ids.includes(v.id.toString())); + if (filteredValues.length === ids.length) { + return; + } + + const load = async () => { + const items = await 
loadObjectsByID(ids); + setValues(items); + }; + + load(); + }, [ids, idsChanged, values]); + + return ; +}; diff --git a/ui/v2.5/src/components/Performers/styles.scss b/ui/v2.5/src/components/Performers/styles.scss index 4a0ec524ac2..4451728043b 100644 --- a/ui/v2.5/src/components/Performers/styles.scss +++ b/ui/v2.5/src/components/Performers/styles.scss @@ -223,3 +223,7 @@ content: ""; } } + +.react-select .alias { + font-weight: bold; +} diff --git a/ui/v2.5/src/components/SceneDuplicateChecker/SceneDuplicateChecker.tsx b/ui/v2.5/src/components/SceneDuplicateChecker/SceneDuplicateChecker.tsx index c45d1b29362..d64d15a1585 100644 --- a/ui/v2.5/src/components/SceneDuplicateChecker/SceneDuplicateChecker.tsx +++ b/ui/v2.5/src/components/SceneDuplicateChecker/SceneDuplicateChecker.tsx @@ -1,9 +1,10 @@ -import React, { useState } from "react"; +import React, { useMemo, useState } from "react"; import { Button, ButtonGroup, Card, Col, + Dropdown, Form, OverlayTrigger, Row, @@ -18,7 +19,12 @@ import { LoadingIndicator } from "../Shared/LoadingIndicator"; import { ErrorMessage } from "../Shared/ErrorMessage"; import { HoverPopover } from "../Shared/HoverPopover"; import { Icon } from "../Shared/Icon"; -import { TagLink } from "../Shared/TagLink"; +import { + GalleryLink, + MovieLink, + SceneMarkerLink, + TagLink, +} from "../Shared/TagLink"; import { SweatDrops } from "../Shared/SweatDrops"; import { Pagination } from "src/components/List/Pagination"; import TextUtils from "src/utils/text"; @@ -46,7 +52,6 @@ const defaultDurationDiff = "1"; export const SceneDuplicateChecker: React.FC = () => { const intl = useIntl(); const history = useHistory(); - const query = new URLSearchParams(history.location.search); const currentPage = Number.parseInt(query.get("page") ?? "1", 10); const pageSize = Number.parseInt(query.get("size") ?? 
"20", 10); @@ -59,9 +64,12 @@ export const SceneDuplicateChecker: React.FC = () => { const [isMultiDelete, setIsMultiDelete] = useState(false); const [deletingScenes, setDeletingScenes] = useState(false); const [editingScenes, setEditingScenes] = useState(false); + const [chkSafeSelect, setChkSafeSelect] = useState(true); + const [checkedScenes, setCheckedScenes] = useState>( {} ); + const { data, loading, refetch } = GQL.useFindDuplicateScenesQuery({ fetchPolicy: "no-cache", variables: { @@ -69,6 +77,9 @@ export const SceneDuplicateChecker: React.FC = () => { duration_diff: durationDiff, }, }); + + const scenes = data?.findDuplicateScenes ?? []; + const { data: missingPhash } = GQL.useFindScenesQuery({ variables: { filter: { @@ -91,10 +102,27 @@ export const SceneDuplicateChecker: React.FC = () => { const [mergeScenes, setMergeScenes] = useState<{ id: string; title: string }[]>(); + const pageOptions = useMemo(() => { + const pageSizes = [ + 10, 20, 30, 40, 50, 100, 150, 200, 250, 500, 750, 1000, 1250, 1500, + ]; + + const filteredSizes = pageSizes.filter((s, i) => { + return scenes.length > s || i == 0 || scenes.length > pageSizes[i - 1]; + }); + + return filteredSizes.map((size) => { + return ( + + ); + }); + }, [scenes.length]); + if (loading) return ; if (!data) return ; - const scenes = data?.findDuplicateScenes ?? 
[]; const filteredScenes = scenes.slice( (currentPage - 1) * pageSize, currentPage * pageSize @@ -116,6 +144,16 @@ export const SceneDuplicateChecker: React.FC = () => { history.push({ search: newQuery.toString() }); }; + const resetCheckboxSelection = () => { + const updatedScenes: Record = {}; + + Object.keys(checkedScenes).forEach((sceneKey) => { + updatedScenes[sceneKey] = false; + }); + + setCheckedScenes(updatedScenes); + }; + function onDeleteDialogClosed(deleted: boolean) { setDeletingScenes(false); if (deleted) { @@ -123,8 +161,102 @@ export const SceneDuplicateChecker: React.FC = () => { refetch(); if (isMultiDelete) setCheckedScenes({}); } + resetCheckboxSelection(); + } + + const findLargestScene = (group: GQL.SlimSceneDataFragment[]) => { + // Get total size of a scene + const totalSize = (scene: GQL.SlimSceneDataFragment) => { + return scene.files.reduce((sum: number, f) => sum + (f.size || 0), 0); + }; + // Find scene object with maximum total size + return group.reduce((largest, scene) => { + const largestSize = totalSize(largest); + const currentSize = totalSize(scene); + return currentSize > largestSize ? 
scene : largest; + }); + }; + + // Helper to get file date + + const findFirstFileByAge = ( + oldest: boolean, + compareScenes: GQL.SlimSceneDataFragment[] + ) => { + let selectedFile: GQL.VideoFileDataFragment; + let oldestTimestamp: Date | undefined = undefined; + + // Loop through all files + for (const file of compareScenes.flatMap((s) => s.files)) { + // Get timestamp + const timestamp: Date = new Date(file.mod_time); + + // Check if current file is oldest + if (oldest) { + if (oldestTimestamp === undefined || timestamp < oldestTimestamp) { + oldestTimestamp = timestamp; + selectedFile = file; + } + } else { + if (oldestTimestamp === undefined || timestamp > oldestTimestamp) { + oldestTimestamp = timestamp; + selectedFile = file; + } + } + } + + // Find scene with oldest file + return compareScenes.find((s) => + s.files.some((f) => f.id === selectedFile.id) + ); + }; + + function checkSameCodec(codecGroup: GQL.SlimSceneDataFragment[]) { + const codecs = codecGroup.map((s) => s.files[0]?.video_codec); + return new Set(codecs).size === 1; } + const onSelectLargestClick = () => { + setSelectedScenes([]); + const checkedArray: Record = {}; + + filteredScenes.forEach((group) => { + if (chkSafeSelect && !checkSameCodec(group)) { + return; + } + // Find largest scene in group a + const largest = findLargestScene(group); + group.forEach((scene) => { + if (scene !== largest) { + checkedArray[scene.id] = true; + } + }); + }); + + setCheckedScenes(checkedArray); + }; + + const onSelectByAge = (oldest: boolean) => { + setSelectedScenes([]); + + const checkedArray: Record = {}; + + filteredScenes.forEach((group) => { + if (chkSafeSelect && !checkSameCodec(group)) { + return; + } + + const oldestScene = findFirstFileByAge(oldest, group); + group.forEach((scene) => { + if (scene !== oldestScene) { + checkedArray[scene.id] = true; + } + }); + }); + + setCheckedScenes(checkedArray); + }; + const handleCheck = (checked: boolean, sceneID: string) => { setCheckedScenes({ 
...checkedScenes, [sceneID]: checked }); }; @@ -144,6 +276,7 @@ export const SceneDuplicateChecker: React.FC = () => { function onEdit() { setSelectedScenes(scenes.flat().filter((s) => checkedScenes[s.id])); setEditingScenes(true); + resetCheckboxSelection(); } const renderFilesize = (filesize: number | null | undefined) => { @@ -221,7 +354,7 @@ export const SceneDuplicateChecker: React.FC = () => { src={sceneMovie.movie.front_image_path ?? ""} /> - { if (scene.scene_markers.length <= 0) return; const popoverContent = scene.scene_markers.map((marker) => { - const markerPopover = { ...marker, scene: { id: scene.id } }; - return ; + const markerWithScene = { ...marker, scene: { id: scene.id } }; + return ; }); return ( @@ -282,7 +415,7 @@ export const SceneDuplicateChecker: React.FC = () => { if (scene.galleries.length <= 0) return; const popoverContent = scene.galleries.map((gallery) => ( - + )); return ( @@ -395,9 +528,10 @@ export const SceneDuplicateChecker: React.FC = () => { currentPage={currentPage} totalItems={scenes.length} metadataByline={[]} - onChangePage={(newPage) => - setQuery({ page: newPage === 1 ? undefined : newPage }) - } + onChangePage={(newPage) => { + setQuery({ page: newPage === 1 ? undefined : newPage }); + resetCheckboxSelection(); + }} /> { ? undefined : e.currentTarget.value, }); + resetCheckboxSelection(); }} > - - - - - + {pageOptions}
); @@ -572,6 +703,54 @@ export const SceneDuplicateChecker: React.FC = () => { + + + + + + + + + resetCheckboxSelection()}> + {intl.formatMessage({ id: "dupe_check.select_none" })} + + + onSelectLargestClick()}> + {intl.formatMessage({ + id: "dupe_check.select_all_but_largest_file", + })} + + + onSelectByAge(true)}> + {intl.formatMessage({ + id: "dupe_check.select_oldest", + })} + + + onSelectByAge(false)}> + {intl.formatMessage({ + id: "dupe_check.select_youngest", + })} + + + + + + + { + setChkSafeSelect(e.target.checked); + resetCheckboxSelection(); + }} + /> + + {maybeRenderMissingPhashWarning()} @@ -621,6 +800,7 @@ export const SceneDuplicateChecker: React.FC = () => { > handleCheck(e.currentTarget.checked, scene.id) } @@ -641,15 +821,36 @@ export const SceneDuplicateChecker: React.FC = () => { src={scene.paths.sprite ?? ""} alt="" width={100} + style={{ + border: checkedScenes[scene.id] + ? "2px solid red" + : "", + }} />

- + + {" "} {scene.title ? scene.title - : TextUtils.fileNameFromPath(file?.path ?? "")} + : TextUtils.fileNameFromPath( + file?.path ?? "" + )}{" "}

{file?.path ?? ""}

diff --git a/ui/v2.5/src/components/SceneDuplicateChecker/styles.scss b/ui/v2.5/src/components/SceneDuplicateChecker/styles.scss index 9177a9367c9..750e4466fdd 100644 --- a/ui/v2.5/src/components/SceneDuplicateChecker/styles.scss +++ b/ui/v2.5/src/components/SceneDuplicateChecker/styles.scss @@ -8,7 +8,8 @@ } .separator { - height: 50px; + border-top: 1px solid white; + height: 10px; } .form-group .row { diff --git a/ui/v2.5/src/components/ScenePlayer/ScenePlayer.tsx b/ui/v2.5/src/components/ScenePlayer/ScenePlayer.tsx index bb28f61f3dc..b7df466c08e 100644 --- a/ui/v2.5/src/components/ScenePlayer/ScenePlayer.tsx +++ b/ui/v2.5/src/components/ScenePlayer/ScenePlayer.tsx @@ -23,12 +23,6 @@ import "./big-buttons"; import "./track-activity"; import "./vrmode"; import cx from "classnames"; -// @ts-ignore -import airplay from "@silvermine/videojs-airplay"; -// @ts-ignore -import chromecast from "@silvermine/videojs-chromecast"; -airplay(videojs); -chromecast(videojs); import { useSceneSaveActivity, useSceneIncrementPlayCount, @@ -46,6 +40,17 @@ import { languageMap } from "src/utils/caption"; import { VIDEO_PLAYER_ID } from "./util"; import { IUIConfig } from "src/core/config"; +// @ts-ignore +import airplay from "@silvermine/videojs-airplay"; +// @ts-ignore +import chromecast from "@silvermine/videojs-chromecast"; +import abLoopPlugin from "videojs-abloop"; + +// register videojs plugins +airplay(videojs); +chromecast(videojs); +abLoopPlugin(window, videojs); + function handleHotkeys(player: VideoJsPlayer, event: videojs.KeyboardEvent) { function seekStep(step: number) { const time = player.currentTime() + step; @@ -73,6 +78,21 @@ function handleHotkeys(player: VideoJsPlayer, event: videojs.KeyboardEvent) { player.currentTime(time); } + function toggleABLooping() { + const opts = player.abLoopPlugin.getOptions(); + if (!opts.start) { + opts.start = player.currentTime(); + } else if (!opts.end) { + opts.end = player.currentTime(); + opts.enabled = true; + } else { + 
opts.start = 0; + opts.end = 0; + opts.enabled = false; + } + player.abLoopPlugin.setOptions(opts); + } + let seekFactor = 10; if (event.shiftKey) { seekFactor = 5; @@ -111,6 +131,9 @@ function handleHotkeys(player: VideoJsPlayer, event: videojs.KeyboardEvent) { if (player.isFullscreen()) player.exitFullscreen(); else player.requestFullscreen(); break; + case 76: // l + toggleABLooping(); + break; case 38: // up arrow player.volume(player.volume() + 0.1); break; @@ -340,6 +363,16 @@ export const ScenePlayer: React.FC = ({ skipButtons: {}, trackActivity: {}, vrMenu: {}, + abLoopPlugin: { + start: 0, + end: false, + enabled: false, + loopIfBeforeStart: true, + loopIfAfterEnd: true, + pauseAfterLooping: false, + pauseBeforeLooping: false, + createButtons: uiConfig?.showAbLoopControls ?? false, + }, }, }; @@ -372,7 +405,8 @@ export const ScenePlayer: React.FC = ({ sceneId.current = undefined; }; // empty deps - only init once - }, []); + // showAbLoopControls is necessary to re-init the player when the config changes + }, [uiConfig?.showAbLoopControls]); useEffect(() => { const player = getPlayer(); diff --git a/ui/v2.5/src/components/ScenePlayer/ScenePlayerScrubber.tsx b/ui/v2.5/src/components/ScenePlayer/ScenePlayerScrubber.tsx index 68dfbb406da..c6cf120fba1 100644 --- a/ui/v2.5/src/components/ScenePlayer/ScenePlayerScrubber.tsx +++ b/ui/v2.5/src/components/ScenePlayer/ScenePlayerScrubber.tsx @@ -6,15 +6,14 @@ import React, { useCallback, } from "react"; import { Button } from "react-bootstrap"; -import axios from "axios"; import * as GQL from "src/core/generated-graphql"; import TextUtils from "src/utils/text"; -import { WebVTT } from "videojs-vtt.js"; import { Icon } from "src/components/Shared/Icon"; import { faChevronRight, faChevronLeft, } from "@fortawesome/free-solid-svg-icons"; +import { useSpriteInfo } from "src/hooks/sprite"; interface IScenePlayerScrubberProps { file: GQL.VideoFileDataFragment; @@ -29,42 +28,6 @@ interface ISceneSpriteItem { time: string; 
} -interface ISceneSpriteInfo { - url: string; - start: number; - end: number; - x: number; - y: number; - w: number; - h: number; -} - -async function fetchSpriteInfo(vttPath: string) { - const response = await axios.get(vttPath, { responseType: "text" }); - - const sprites: ISceneSpriteInfo[] = []; - - const parser = new WebVTT.Parser(window, WebVTT.StringDecoder()); - parser.oncue = (cue: VTTCue) => { - const match = cue.text.match(/^([^#]*)#xywh=(\d+),(\d+),(\d+),(\d+)$/i); - if (!match) return; - - sprites.push({ - url: new URL(match[1], vttPath).href, - start: cue.startTime, - end: cue.endTime, - x: Number(match[2]), - y: Number(match[3]), - w: Number(match[4]), - h: Number(match[5]), - }); - }; - parser.parse(response.data); - parser.flush(); - - return sprites; -} - export const ScenePlayerScrubber: React.FC = ({ file, scene, @@ -119,34 +82,32 @@ export const ScenePlayerScrubber: React.FC = ({ [onSeek, file.duration, scrubWidth] ); + const spriteInfo = useSpriteInfo(scene.paths.vtt ?? 
undefined); const [spriteItems, setSpriteItems] = useState(); useEffect(() => { - if (!scene.paths.vtt) return; - fetchSpriteInfo(scene.paths.vtt).then((sprites) => { - if (!sprites) return; - let totalWidth = 0; - const newSprites = sprites?.map((sprite, index) => { - totalWidth += sprite.w; - const left = sprite.w * index; - const style = { - width: `${sprite.w}px`, - height: `${sprite.h}px`, - backgroundPosition: `${-sprite.x}px ${-sprite.y}px`, - backgroundImage: `url(${sprite.url})`, - left: `${left}px`, - }; - const start = TextUtils.secondsToTimestamp(sprite.start); - const end = TextUtils.secondsToTimestamp(sprite.end); - return { - style, - time: `${start} - ${end}`, - }; - }); - setScrubWidth(totalWidth); - setSpriteItems(newSprites); + if (!spriteInfo) return; + let totalWidth = 0; + const newSprites = spriteInfo?.map((sprite, index) => { + totalWidth += sprite.w; + const left = sprite.w * index; + const style = { + width: `${sprite.w}px`, + height: `${sprite.h}px`, + backgroundPosition: `${-sprite.x}px ${-sprite.y}px`, + backgroundImage: `url(${sprite.url})`, + left: `${left}px`, + }; + const start = TextUtils.secondsToTimestamp(sprite.start); + const end = TextUtils.secondsToTimestamp(sprite.end); + return { + style, + time: `${start} - ${end}`, + }; }); - }, [scene]); + setScrubWidth(totalWidth); + setSpriteItems(newSprites); + }, [spriteInfo]); useEffect(() => { const onResize = (entries: ResizeObserverEntry[]) => { diff --git a/ui/v2.5/src/components/Scenes/PreviewScrubber.tsx b/ui/v2.5/src/components/Scenes/PreviewScrubber.tsx new file mode 100644 index 00000000000..3155f2009e1 --- /dev/null +++ b/ui/v2.5/src/components/Scenes/PreviewScrubber.tsx @@ -0,0 +1,176 @@ +import React, { useRef, useMemo, useState, useLayoutEffect } from "react"; +import { useSpriteInfo } from "src/hooks/sprite"; +import { useThrottle } from "src/hooks/throttle"; +import TextUtils from "src/utils/text"; + +interface IHoverScrubber { + totalSprites: number; + activeIndex: 
number | undefined; + setActiveIndex: (index: number | undefined) => void; + onClick?: () => void; +} + +const HoverScrubber: React.FC = ({ + totalSprites, + activeIndex, + setActiveIndex, + onClick, +}) => { + function getActiveIndex(e: React.MouseEvent) { + const { width } = e.currentTarget.getBoundingClientRect(); + const x = e.nativeEvent.offsetX; + + const i = Math.floor((x / width) * totalSprites); + + // clamp to [0, totalSprites) + if (i < 0) return 0; + if (i >= totalSprites) return totalSprites - 1; + return i; + } + + function onMouseMove(e: React.MouseEvent) { + const relatedTarget = e.currentTarget; + + if (relatedTarget !== e.target) return; + + setActiveIndex(getActiveIndex(e)); + } + + function onMouseLeave() { + setActiveIndex(undefined); + } + + function onScrubberClick(e: React.MouseEvent) { + if (!onClick) return; + + const relatedTarget = e.currentTarget; + + if (relatedTarget !== e.target) return; + + e.preventDefault(); + onClick(); + } + + const indicatorStyle = useMemo(() => { + if (activeIndex === undefined || !totalSprites) return {}; + + const width = (activeIndex / totalSprites) * 100; + + return { + width: `${width}%`, + }; + }, [activeIndex, totalSprites]); + + return ( +
+
+
+ {activeIndex !== undefined && ( +
+ )} +
+
+ ); +}; + +interface IScenePreviewProps { + vttPath: string | undefined; + onClick?: (timestamp: number) => void; +} + +function scaleToFit(dimensions: { w: number; h: number }, bounds: DOMRect) { + const rw = bounds.width / dimensions.w; + const rh = bounds.height / dimensions.h; + + // for consistency, use max by default and min for portrait + if (dimensions.w > dimensions.h) { + return Math.max(rw, rh); + } + + return Math.min(rw, rh); +} + +export const PreviewScrubber: React.FC = ({ + vttPath, + onClick, +}) => { + const imageParentRef = useRef(null); + const [style, setStyle] = useState({}); + + const [activeIndex, setActiveIndex] = useState(); + + const debounceSetActiveIndex = useThrottle(setActiveIndex, 50); + + const spriteInfo = useSpriteInfo(vttPath); + + const sprite = useMemo(() => { + if (!spriteInfo || activeIndex === undefined) { + return undefined; + } + return spriteInfo[activeIndex]; + }, [activeIndex, spriteInfo]); + + useLayoutEffect(() => { + const imageParent = imageParentRef.current; + + if (!sprite || !imageParent) { + return setStyle({}); + } + + const clientRect = imageParent.getBoundingClientRect(); + const scale = scaleToFit(sprite, clientRect); + + setStyle({ + backgroundPosition: `${-sprite.x}px ${-sprite.y}px`, + backgroundImage: `url(${sprite.url})`, + width: `${sprite.w}px`, + height: `${sprite.h}px`, + transform: `scale(${scale})`, + }); + }, [sprite]); + + const currentTime = useMemo(() => { + if (!sprite) return undefined; + + const start = TextUtils.secondsToTimestamp(sprite.start); + + return start; + }, [sprite]); + + function onScrubberClick() { + if (!sprite || !onClick) { + return; + } + + onClick(sprite.start); + } + + if (!spriteInfo) return null; + + return ( +
+ {sprite && ( +
+
+ {currentTime !== undefined && ( +
{currentTime}
+ )} +
+ )} + debounceSetActiveIndex(i)} + onClick={onScrubberClick} + /> +
+ ); +}; diff --git a/ui/v2.5/src/components/Scenes/SceneCard.tsx b/ui/v2.5/src/components/Scenes/SceneCard.tsx index 190c4b4697f..0672ae4a61f 100644 --- a/ui/v2.5/src/components/Scenes/SceneCard.tsx +++ b/ui/v2.5/src/components/Scenes/SceneCard.tsx @@ -1,10 +1,15 @@ import React, { useEffect, useMemo, useRef } from "react"; import { Button, ButtonGroup } from "react-bootstrap"; -import { Link } from "react-router-dom"; +import { Link, useHistory } from "react-router-dom"; import cx from "classnames"; import * as GQL from "src/core/generated-graphql"; import { Icon } from "../Shared/Icon"; -import { TagLink } from "../Shared/TagLink"; +import { + GalleryLink, + TagLink, + MovieLink, + SceneMarkerLink, +} from "../Shared/TagLink"; import { HoverPopover } from "../Shared/HoverPopover"; import { SweatDrops } from "../Shared/SweatDrops"; import { TruncatedText } from "../Shared/TruncatedText"; @@ -25,12 +30,15 @@ import { faTag, } from "@fortawesome/free-solid-svg-icons"; import { objectPath, objectTitle } from "src/core/files"; +import { PreviewScrubber } from "./PreviewScrubber"; interface IScenePreviewProps { isPortrait: boolean; image?: string; video?: string; soundActive: boolean; + vttPath?: string; + onScrubberClick?: (timestamp: number) => void; } export const ScenePreview: React.FC = ({ @@ -38,6 +46,8 @@ export const ScenePreview: React.FC = ({ video, isPortrait, soundActive, + vttPath, + onScrubberClick, }) => { const videoEl = useRef(null); @@ -72,6 +82,7 @@ export const ScenePreview: React.FC = ({ ref={videoEl} src={video} /> +
); }; @@ -90,6 +101,7 @@ interface ISceneCardProps { export const SceneCard: React.FC = ( props: ISceneCardProps ) => { + const history = useHistory(); const { configuration } = React.useContext(ConfigurationContext); const file = useMemo( @@ -212,7 +224,7 @@ export const SceneCard: React.FC = ( src={sceneMovie.movie.front_image_path ?? ""} /> - = ( if (props.scene.scene_markers.length <= 0) return; const popoverContent = props.scene.scene_markers.map((marker) => { - const markerPopover = { ...marker, scene: { id: props.scene.id } }; - return ; + const markerWithScene = { ...marker, scene: { id: props.scene.id } }; + return ; }); return ( @@ -275,7 +287,7 @@ export const SceneCard: React.FC = ( if (props.scene.galleries.length <= 0) return; const popoverContent = props.scene.galleries.map((gallery) => ( - + )); return ( @@ -383,6 +395,18 @@ export const SceneCard: React.FC = ( }) : `/scenes/${props.scene.id}`; + function onScrubberClick(timestamp: number) { + const link = props.queue + ? props.queue.makeLink(props.scene.id, { + sceneIndex: props.index, + continue: cont, + start: timestamp, + }) + : `/scenes/${props.scene.id}?t=${timestamp}`; + + history.push(link); + } + return ( = ( video={props.scene.paths.preview ?? undefined} isPortrait={isPortrait()} soundActive={configuration?.interface?.soundOnPreview ?? false} + vttPath={props.scene.paths.vtt ?? undefined} + onScrubberClick={onScrubberClick} /> {maybeRenderSceneSpecsOverlay()} diff --git a/ui/v2.5/src/components/Scenes/SceneDetails/PrimaryTags.tsx b/ui/v2.5/src/components/Scenes/SceneDetails/PrimaryTags.tsx index f658d34b16f..9694ca9ed29 100644 --- a/ui/v2.5/src/components/Scenes/SceneDetails/PrimaryTags.tsx +++ b/ui/v2.5/src/components/Scenes/SceneDetails/PrimaryTags.tsx @@ -18,18 +18,19 @@ export const PrimaryTags: React.FC = ({ }) => { if (!sceneMarkers?.length) return
; - const primaries: Record = {}; - const primaryTags: Record = {}; + const primaryTagNames: Record = {}; + const markersByTag: Record = {}; sceneMarkers.forEach((m) => { - if (primaryTags[m.primary_tag.id]) primaryTags[m.primary_tag.id].push(m); - else { - primaryTags[m.primary_tag.id] = [m]; - primaries[m.primary_tag.id] = m.primary_tag; + if (primaryTagNames[m.primary_tag.id]) { + markersByTag[m.primary_tag.id].push(m); + } else { + primaryTagNames[m.primary_tag.id] = m.primary_tag.name; + markersByTag[m.primary_tag.id] = [m]; } }); - const primaryCards = Object.keys(primaryTags).map((id) => { - const markers = primaryTags[id].map((marker) => { + const primaryCards = Object.keys(markersByTag).map((id) => { + const markers = markersByTag[id].map((marker) => { const tags = marker.tags.map((tag) => ( {tag.name} @@ -59,7 +60,7 @@ export const PrimaryTags: React.FC = ({ return ( -

{primaries[id].name}

+

{primaryTagNames[id]}

{markers}
); diff --git a/ui/v2.5/src/components/Scenes/SceneDetails/SceneEditPanel.tsx b/ui/v2.5/src/components/Scenes/SceneDetails/SceneEditPanel.tsx index 15989fa3cb6..860b26a78cc 100644 --- a/ui/v2.5/src/components/Scenes/SceneDetails/SceneEditPanel.tsx +++ b/ui/v2.5/src/components/Scenes/SceneDetails/SceneEditPanel.tsx @@ -20,7 +20,6 @@ import { queryScrapeSceneQueryFragment, } from "src/core/StashService"; import { - PerformerSelect, TagSelect, StudioSelect, GallerySelect, @@ -51,6 +50,11 @@ import { useRatingKeybinds } from "src/hooks/keybinds"; import { lazyComponent } from "src/utils/lazyComponent"; import isEqual from "lodash-es/isEqual"; import { DateInput } from "src/components/Shared/DateInput"; +import { yupDateString, yupUniqueStringList } from "src/utils/yup"; +import { + Performer, + PerformerSelect, +} from "src/components/Performers/PerformerSelect"; const SceneScrapeDialog = lazyComponent(() => import("./SceneScrapeDialog")); const SceneQueryModal = lazyComponent(() => import("./SceneQueryModal")); @@ -78,6 +82,7 @@ export const SceneEditPanel: React.FC = ({ const [galleries, setGalleries] = useState<{ id: string; title: string }[]>( [] ); + const [performers, setPerformers] = useState([]); const Scrapers = useListSceneScrapers(); const [fragmentScrapers, setFragmentScrapers] = useState([]); @@ -98,6 +103,10 @@ export const SceneEditPanel: React.FC = ({ ); }, [scene.galleries]); + useEffect(() => { + setPerformers(scene.performers ?? 
[]); + }, [scene.performers]); + const { configuration: stashConfig } = React.useContext(ConfigurationContext); // Network state @@ -106,38 +115,8 @@ export const SceneEditPanel: React.FC = ({ const schema = yup.object({ title: yup.string().ensure(), code: yup.string().ensure(), - urls: yup - .array(yup.string().required()) - .defined() - .test({ - name: "unique", - test: (value) => { - const dupes = value - .map((e, i, a) => { - if (a.indexOf(e) !== i) { - return String(i - 1); - } else { - return null; - } - }) - .filter((e) => e !== null) as string[]; - if (dupes.length === 0) return true; - return new yup.ValidationError(dupes.join(" "), value, "urls"); - }, - }), - date: yup - .string() - .ensure() - .test({ - name: "date", - test: (value) => { - if (!value) return true; - if (!value.match(/^\d{4}-\d{2}-\d{2}$/)) return false; - if (Number.isNaN(Date.parse(value))) return false; - return true; - }, - message: intl.formatMessage({ id: "validation.date_invalid_form" }), - }), + urls: yupUniqueStringList("urls"), + date: yupDateString(intl), director: yup.string().ensure(), rating100: yup.number().nullable().defined(), gallery_ids: yup.array(yup.string().required()).defined(), @@ -218,6 +197,14 @@ export const SceneEditPanel: React.FC = ({ ); } + function onSetPerformers(items: Performer[]) { + setPerformers(items); + formik.setFieldValue( + "performer_ids", + items.map((item) => item.id) + ); + } + useRatingKeybinds( isVisible, stashConfig?.ui?.ratingSystemOptions?.type, @@ -414,6 +401,7 @@ export const SceneEditPanel: React.FC = ({ return ( onScrapeDialogClosed(s)} @@ -581,8 +569,15 @@ export const SceneEditPanel: React.FC = ({ }); if (idPerfs.length > 0) { - const newIds = idPerfs.map((p) => p.stored_id); - formik.setFieldValue("performer_ids", newIds as string[]); + onSetPerformers( + idPerfs.map((p) => { + return { + id: p.stored_id!, + name: p.name ?? 
"", + alias_list: [], + }; + }) + ); } } @@ -852,13 +847,8 @@ export const SceneEditPanel: React.FC = ({ - formik.setFieldValue( - "performer_ids", - items.map((item) => item.id) - ) - } - ids={formik.values.performer_ids} + onSelect={onSetPerformers} + values={performers} /> diff --git a/ui/v2.5/src/components/Scenes/SceneDetails/SceneMarkerForm.tsx b/ui/v2.5/src/components/Scenes/SceneDetails/SceneMarkerForm.tsx index 51983ada9c6..9ba03490192 100644 --- a/ui/v2.5/src/components/Scenes/SceneDetails/SceneMarkerForm.tsx +++ b/ui/v2.5/src/components/Scenes/SceneDetails/SceneMarkerForm.tsx @@ -145,15 +145,14 @@ export const SceneMarkerForm: React.FC = ({
formik.setFieldValue("seconds", s)} + value={formik.values.seconds ?? 0} + setValue={(v) => formik.setFieldValue("seconds", v ?? null)} onReset={() => formik.setFieldValue( "seconds", Math.round(getPlayerPosition() ?? 0) ) } - numericValue={formik.values.seconds} - mandatory />
diff --git a/ui/v2.5/src/components/Scenes/SceneDetails/SceneScrapeDialog.tsx b/ui/v2.5/src/components/Scenes/SceneDetails/SceneScrapeDialog.tsx index a75d7eac3e0..a0cc2257af6 100644 --- a/ui/v2.5/src/components/Scenes/SceneDetails/SceneScrapeDialog.tsx +++ b/ui/v2.5/src/components/Scenes/SceneDetails/SceneScrapeDialog.tsx @@ -1,292 +1,46 @@ -import React, { useMemo, useState } from "react"; +import React, { useState } from "react"; import * as GQL from "src/core/generated-graphql"; -import { - MovieSelect, - TagSelect, - StudioSelect, - PerformerSelect, -} from "src/components/Shared/Select"; import { ScrapeDialog, - ScrapeDialogRow, - ScrapeResult, ScrapedInputGroupRow, ScrapedTextAreaRow, ScrapedImageRow, - IHasName, ScrapedStringListRow, -} from "src/components/Shared/ScrapeDialog"; +} from "src/components/Shared/ScrapeDialog/ScrapeDialog"; import clone from "lodash-es/clone"; -import { - useStudioCreate, - usePerformerCreate, - useMovieCreate, - useTagCreate, -} from "src/core/StashService"; -import { useToast } from "src/hooks/Toast"; import { useIntl } from "react-intl"; import { uniq } from "lodash-es"; -import { scrapedPerformerToCreateInput } from "src/core/performers"; -import { scrapedMovieToCreateInput } from "src/core/movies"; - -interface IScrapedStudioRow { - title: string; - result: ScrapeResult; - onChange: (value: ScrapeResult) => void; - newStudio?: GQL.ScrapedStudio; - onCreateNew?: (value: GQL.ScrapedStudio) => void; -} - -export const ScrapedStudioRow: React.FC = ({ - title, - result, - onChange, - newStudio, - onCreateNew, -}) => { - function renderScrapedStudio( - scrapeResult: ScrapeResult, - isNew?: boolean, - onChangeFn?: (value: string) => void - ) { - const resultValue = isNew - ? scrapeResult.newValue - : scrapeResult.originalValue; - const value = resultValue ? 
[resultValue] : []; - - return ( - { - if (onChangeFn) { - onChangeFn(items[0]?.id); - } - }} - ids={value} - /> - ); - } - - return ( - renderScrapedStudio(result)} - renderNewField={() => - renderScrapedStudio(result, true, (value) => - onChange(result.cloneWithValue(value)) - ) - } - onChange={onChange} - newValues={newStudio ? [newStudio] : undefined} - onCreateNew={() => { - if (onCreateNew && newStudio) onCreateNew(newStudio); - }} - /> - ); -}; - -interface IScrapedObjectsRow { - title: string; - result: ScrapeResult; - onChange: (value: ScrapeResult) => void; - newObjects?: T[]; - onCreateNew?: (value: T) => void; - renderObjects: ( - result: ScrapeResult, - isNew?: boolean, - onChange?: (value: string[]) => void - ) => JSX.Element; -} - -export const ScrapedObjectsRow = ( - props: IScrapedObjectsRow -) => { - const { title, result, onChange, newObjects, onCreateNew, renderObjects } = - props; - - return ( - renderObjects(result)} - renderNewField={() => - renderObjects(result, true, (value) => - onChange(result.cloneWithValue(value)) - ) - } - onChange={onChange} - newValues={newObjects} - onCreateNew={(i) => { - if (onCreateNew) onCreateNew(newObjects![i]); - }} - /> - ); -}; - -type IScrapedObjectRowImpl = Omit, "renderObjects">; - -export const ScrapedPerformersRow: React.FC< - IScrapedObjectRowImpl -> = ({ title, result, onChange, newObjects, onCreateNew }) => { - const performersCopy = useMemo(() => { - return ( - newObjects?.map((p) => { - const name: string = p.name ?? ""; - return { ...p, name }; - }) ?? [] - ); - }, [newObjects]); - - type PerformerType = GQL.ScrapedPerformer & { - name: string; - }; - - function renderScrapedPerformers( - scrapeResult: ScrapeResult, - isNew?: boolean, - onChangeFn?: (value: string[]) => void - ) { - const resultValue = isNew - ? scrapeResult.newValue - : scrapeResult.originalValue; - const value = resultValue ?? 
[]; - - return ( - { - if (onChangeFn) { - onChangeFn(items.map((i) => i.id)); - } - }} - ids={value} - /> - ); - } - - return ( - - title={title} - result={result} - renderObjects={renderScrapedPerformers} - onChange={onChange} - newObjects={performersCopy} - onCreateNew={onCreateNew} - /> - ); -}; - -export const ScrapedMoviesRow: React.FC< - IScrapedObjectRowImpl -> = ({ title, result, onChange, newObjects, onCreateNew }) => { - const moviesCopy = useMemo(() => { - return ( - newObjects?.map((p) => { - const name: string = p.name ?? ""; - return { ...p, name }; - }) ?? [] - ); - }, [newObjects]); - - type MovieType = GQL.ScrapedMovie & { - name: string; - }; - - function renderScrapedMovies( - scrapeResult: ScrapeResult, - isNew?: boolean, - onChangeFn?: (value: string[]) => void - ) { - const resultValue = isNew - ? scrapeResult.newValue - : scrapeResult.originalValue; - const value = resultValue ?? []; - - return ( - { - if (onChangeFn) { - onChangeFn(items.map((i) => i.id)); - } - }} - ids={value} - /> - ); - } - - return ( - - title={title} - result={result} - renderObjects={renderScrapedMovies} - onChange={onChange} - newObjects={moviesCopy} - onCreateNew={onCreateNew} - /> - ); -}; - -export const ScrapedTagsRow: React.FC< - IScrapedObjectRowImpl -> = ({ title, result, onChange, newObjects, onCreateNew }) => { - function renderScrapedTags( - scrapeResult: ScrapeResult, - isNew?: boolean, - onChangeFn?: (value: string[]) => void - ) { - const resultValue = isNew - ? scrapeResult.newValue - : scrapeResult.originalValue; - const value = resultValue ?? 
[]; - - return ( - { - if (onChangeFn) { - onChangeFn(items.map((i) => i.id)); - } - }} - ids={value} - /> - ); - } - - return ( - - title={title} - result={result} - renderObjects={renderScrapedTags} - onChange={onChange} - newObjects={newObjects} - onCreateNew={onCreateNew} - /> - ); -}; +import { Performer } from "src/components/Performers/PerformerSelect"; +import { IHasStoredID, sortStoredIdObjects } from "src/utils/data"; +import { + ObjectListScrapeResult, + ScrapeResult, +} from "src/components/Shared/ScrapeDialog/scrapeResult"; +import { + ScrapedMoviesRow, + ScrapedPerformersRow, + ScrapedStudioRow, + ScrapedTagsRow, +} from "src/components/Shared/ScrapeDialog/ScrapedObjectsRow"; +import { + useCreateScrapedMovie, + useCreateScrapedPerformer, + useCreateScrapedStudio, + useCreateScrapedTag, +} from "src/components/Shared/ScrapeDialog/createObjects"; interface ISceneScrapeDialogProps { scene: Partial; + scenePerformers: Performer[]; scraped: GQL.ScrapedScene; endpoint?: string; onClose: (scrapedScene?: GQL.ScrapedScene) => void; } -interface IHasStoredID { - stored_id?: string | null; -} - export const SceneScrapeDialog: React.FC = ({ scene, + scenePerformers, scraped, onClose, endpoint, @@ -365,10 +119,17 @@ export const SceneScrapeDialog: React.FC = ({ return ret; } - const [performers, setPerformers] = useState>( - new ScrapeResult( - sortIdList(scene.performer_ids), - mapStoredIdObjects(scraped.performers ?? undefined) + const [performers, setPerformers] = useState< + ObjectListScrapeResult + >( + new ObjectListScrapeResult( + sortStoredIdObjects( + scenePerformers.map((p) => ({ + stored_id: p.id, + name: p.name, + })) + ), + sortStoredIdObjects(scraped.performers ?? 
undefined) ) ); const [newPerformers, setNewPerformers] = useState( @@ -403,13 +164,34 @@ export const SceneScrapeDialog: React.FC = ({ new ScrapeResult(scene.cover_image, scraped.image) ); - const [createStudio] = useStudioCreate(); - const [createPerformer] = usePerformerCreate(); - const [createMovie] = useMovieCreate(); - const [createTag] = useTagCreate(); + const createNewStudio = useCreateScrapedStudio({ + scrapeResult: studio, + setScrapeResult: setStudio, + setNewObject: setNewStudio, + }); + + const createNewPerformer = useCreateScrapedPerformer({ + scrapeResult: performers, + setScrapeResult: setPerformers, + newObjects: newPerformers, + setNewObjects: setNewPerformers, + }); + + const createNewMovie = useCreateScrapedMovie({ + scrapeResult: movies, + setScrapeResult: setMovies, + newObjects: newMovies, + setNewObjects: setNewMovies, + }); + + const createNewTag = useCreateScrapedTag({ + scrapeResult: tags, + setScrapeResult: setTags, + newObjects: newTags, + setNewObjects: setNewTags, + }); const intl = useIntl(); - const Toast = useToast(); // don't show the dialog if nothing was scraped if ( @@ -436,143 +218,6 @@ export const SceneScrapeDialog: React.FC = ({ return <>; } - async function createNewStudio(toCreate: GQL.ScrapedStudio) { - try { - const result = await createStudio({ - variables: { - input: { - name: toCreate.name, - url: toCreate.url, - }, - }, - }); - - // set the new studio as the value - setStudio(studio.cloneWithValue(result.data!.studioCreate!.id)); - setNewStudio(undefined); - - Toast.success({ - content: ( - - Created studio: {toCreate.name} - - ), - }); - } catch (e) { - Toast.error(e); - } - } - - async function createNewPerformer(toCreate: GQL.ScrapedPerformer) { - const input = scrapedPerformerToCreateInput(toCreate); - - try { - const result = await createPerformer({ - variables: { input }, - }); - - const newValue = [...(performers.newValue ?? 
[])]; - if (result.data?.performerCreate) - newValue.push(result.data.performerCreate.id); - - // add the new performer to the new performers value - const performerClone = performers.cloneWithValue(newValue); - setPerformers(performerClone); - - // remove the performer from the list - const newPerformersClone = newPerformers.concat(); - const pIndex = newPerformersClone.findIndex( - (p) => p.name === toCreate.name - ); - if (pIndex === -1) throw new Error("Could not find performer to remove"); - - newPerformersClone.splice(pIndex, 1); - - setNewPerformers(newPerformersClone); - - Toast.success({ - content: ( - - Created performer: {toCreate.name} - - ), - }); - } catch (e) { - Toast.error(e); - } - } - - async function createNewMovie(toCreate: GQL.ScrapedMovie) { - const movieInput = scrapedMovieToCreateInput(toCreate); - try { - const result = await createMovie({ - variables: { input: movieInput }, - }); - - // add the new movie to the new movies value - const movieClone = movies.cloneWithValue(movies.newValue); - if (!movieClone.newValue) { - movieClone.newValue = []; - } - movieClone.newValue.push(result.data!.movieCreate!.id); - setMovies(movieClone); - - // remove the movie from the list - const newMoviesClone = newMovies.concat(); - const pIndex = newMoviesClone.findIndex((p) => p.name === toCreate.name); - if (pIndex === -1) throw new Error("Could not find movie to remove"); - newMoviesClone.splice(pIndex, 1); - - setNewMovies(newMoviesClone); - - Toast.success({ - content: ( - - Created movie: {toCreate.name} - - ), - }); - } catch (e) { - Toast.error(e); - } - } - - async function createNewTag(toCreate: GQL.ScrapedTag) { - const tagInput: GQL.TagCreateInput = { name: toCreate.name ?? "" }; - try { - const result = await createTag({ - variables: { - input: tagInput, - }, - }); - - const newValue = [...(tags.newValue ?? 
[])]; - if (result.data?.tagCreate) newValue.push(result.data.tagCreate.id); - - // add the new tag to the new tags value - const tagClone = tags.cloneWithValue(newValue); - setTags(tagClone); - - // remove the tag from the list - const newTagsClone = newTags.concat(); - const pIndex = newTagsClone.indexOf(toCreate); - if (pIndex === -1) throw new Error("Could not find tag to remove"); - newTagsClone.splice(pIndex, 1); - - setNewTags(newTagsClone); - - Toast.success({ - content: ( - - Created tag: {toCreate.name} - - ), - }); - } catch (e) { - Toast.error(e); - } - } - function makeNewScrapedItem(): GQL.ScrapedSceneDataFragment { const newStudioValue = studio.getNewValue(); @@ -588,12 +233,7 @@ export const SceneScrapeDialog: React.FC = ({ name: "", } : undefined, - performers: performers.getNewValue()?.map((p) => { - return { - stored_id: p, - name: "", - }; - }), + performers: performers.getNewValue(), movies: movies.getNewValue()?.map((m) => { return { stored_id: m, diff --git a/ui/v2.5/src/components/Scenes/SceneMergeDialog.tsx b/ui/v2.5/src/components/Scenes/SceneMergeDialog.tsx index ca296a4b7dd..9ce3116671d 100644 --- a/ui/v2.5/src/components/Scenes/SceneMergeDialog.tsx +++ b/ui/v2.5/src/components/Scenes/SceneMergeDialog.tsx @@ -12,26 +12,30 @@ import { FormattedMessage, useIntl } from "react-intl"; import { useToast } from "src/hooks/Toast"; import { faExchangeAlt, faSignInAlt } from "@fortawesome/free-solid-svg-icons"; import { - hasScrapedValues, ScrapeDialog, ScrapeDialogRow, ScrapedImageRow, ScrapedInputGroupRow, ScrapedStringListRow, ScrapedTextAreaRow, +} from "../Shared/ScrapeDialog/ScrapeDialog"; +import { clone, uniq } from "lodash-es"; +import { galleryTitle } from "src/core/galleries"; +import { RatingSystem } from "src/components/Shared/Rating/RatingSystem"; +import { ModalComponent } from "../Shared/Modal"; +import { IHasStoredID, sortStoredIdObjects } from "src/utils/data"; +import { + ObjectListScrapeResult, ScrapeResult, 
ZeroableScrapeResult, -} from "../Shared/ScrapeDialog"; -import { clone, uniq } from "lodash-es"; + hasScrapedValues, +} from "../Shared/ScrapeDialog/scrapeResult"; import { ScrapedMoviesRow, ScrapedPerformersRow, ScrapedStudioRow, ScrapedTagsRow, -} from "./SceneDetails/SceneScrapeDialog"; -import { galleryTitle } from "src/core/galleries"; -import { RatingSystem } from "src/components/Shared/Rating/RatingSystem"; -import { ModalComponent } from "../Shared/Modal"; +} from "../Shared/ScrapeDialog/ScrapedObjectsRow"; interface IStashIDsField { values: GQL.StashId[]; @@ -101,8 +105,25 @@ const SceneMergeDetails: React.FC = ({ return ret; } - const [performers, setPerformers] = useState>( - new ScrapeResult(sortIdList(dest.performers.map((p) => p.id))) + function idToStoredID(o: { id: string; name: string }) { + return { + stored_id: o.id, + name: o.name, + }; + } + + function uniqIDStoredIDs(objs: IHasStoredID[]) { + return objs.filter((o, i) => { + return objs.findIndex((oo) => oo.stored_id === o.stored_id) === i; + }); + } + + const [performers, setPerformers] = useState< + ObjectListScrapeResult + >( + new ObjectListScrapeResult( + sortStoredIdObjects(dest.performers.map(idToStoredID)) + ) ); const [movies, setMovies] = useState>( @@ -183,9 +204,9 @@ const SceneMergeDetails: React.FC = ({ ); setPerformers( - new ScrapeResult( - dest.performers.map((p) => p.id), - uniq(all.map((s) => s.performers.map((p) => p.id)).flat()) + new ObjectListScrapeResult( + sortStoredIdObjects(dest.performers.map(idToStoredID)), + uniqIDStoredIDs(all.map((s) => s.performers.map(idToStoredID)).flat()) ) ); setTags( @@ -559,7 +580,7 @@ const SceneMergeDetails: React.FC = ({ play_duration: playDuration.getNewValue(), gallery_ids: galleries.getNewValue(), studio_id: studio.getNewValue(), - performer_ids: performers.getNewValue(), + performer_ids: performers.getNewValue()?.map((p) => p.stored_id!), movies: movies.getNewValue()?.map((m) => { // find the equivalent movie in the original 
scenes const found = all diff --git a/ui/v2.5/src/components/Scenes/styles.scss b/ui/v2.5/src/components/Scenes/styles.scss index f9eb8d0497e..a4fc8f46cde 100644 --- a/ui/v2.5/src/components/Scenes/styles.scss +++ b/ui/v2.5/src/components/Scenes/styles.scss @@ -652,3 +652,66 @@ input[type="range"].blue-slider { margin-bottom: 1rem; text-align: right; } + +.preview-scrubber { + height: 100%; + position: absolute; + width: 100%; + + .scene-card-preview-image { + align-items: center; + display: flex; + justify-content: center; + overflow: hidden; + } + + .scrubber-image { + height: 100%; + width: 100%; + } + + .scrubber-timestamp { + bottom: calc(20px + 0.25rem); + font-weight: 400; + opacity: 0.75; + position: absolute; + right: 0.7rem; + text-shadow: 0 0 3px #000; + } +} + +.hover-scrubber { + bottom: 0; + height: 20px; + overflow: hidden; + position: absolute; + width: 100%; + + .hover-scrubber-area { + cursor: col-resize; + height: 100%; + position: absolute; + width: 100%; + z-index: 1; + } + + .hover-scrubber-indicator { + background-color: rgba(255, 255, 255, 0.1); + bottom: -100%; + height: 100%; + position: absolute; + transition: bottom 0.2s ease-in-out; + width: 100%; + + .hover-scrubber-indicator-marker { + background-color: rgba(255, 0, 0, 0.5); + bottom: 0; + height: 5px; + position: absolute; + } + } + + &:hover .hover-scrubber-indicator { + bottom: 0; + } +} diff --git a/ui/v2.5/src/components/Settings/SettingsInterfacePanel/SettingsInterfacePanel.tsx b/ui/v2.5/src/components/Settings/SettingsInterfacePanel/SettingsInterfacePanel.tsx index 6b6bf69bc0d..49f72b52db1 100644 --- a/ui/v2.5/src/components/Settings/SettingsInterfacePanel/SettingsInterfacePanel.tsx +++ b/ui/v2.5/src/components/Settings/SettingsInterfacePanel/SettingsInterfacePanel.tsx @@ -356,14 +356,21 @@ export const SettingsInterfacePanel: React.FC = () => { onChange={(v) => saveInterface({ maximumLoopDuration: v })} renderField={(value, setValue) => ( setValue(duration ?? 
0)} + value={value} + setValue={(duration) => setValue(duration ?? 0)} /> )} renderValue={(v) => { return {DurationUtils.secondsToString(v ?? 0)}; }} /> + + saveUI({ showAbLoopControls: v })} + /> { } = React.useContext(SettingStateContext); // undefined to hide dialog, true for enable, false for disable - const [enableDisable, setEnableDisable] = useState( - undefined - ); + const [enableDisable, setEnableDisable] = useState(); const [enableUntilRestart, setEnableUntilRestart] = useState(false); - const [enableDuration, setEnableDuration] = useState( - undefined - ); + const [enableDuration, setEnableDuration] = useState(0); const [ipEntry, setIPEntry] = useState(""); - const [tempIP, setTempIP] = useState(); + const [tempIP, setTempIP] = useState(); const { data: statusData, loading, refetch: statusRefetch } = useDLNAStatus(); @@ -273,8 +269,8 @@ export const SettingsServicesPanel: React.FC = () => { setEnableDuration(v ?? 0)} + value={enableDuration} + setValue={(v) => setEnableDuration(v ?? 0)} disabled={enableUntilRestart} /> @@ -315,8 +311,8 @@ export const SettingsServicesPanel: React.FC = () => { setEnableDuration(v ?? 0)} + value={enableDuration} + setValue={(v) => setEnableDuration(v ?? 0)} disabled={enableUntilRestart} /> diff --git a/ui/v2.5/src/components/Settings/Tasks/GenerateOptions.tsx b/ui/v2.5/src/components/Settings/Tasks/GenerateOptions.tsx index b1cd7388585..23e5f21db56 100644 --- a/ui/v2.5/src/components/Settings/Tasks/GenerateOptions.tsx +++ b/ui/v2.5/src/components/Settings/Tasks/GenerateOptions.tsx @@ -133,6 +133,7 @@ export const GenerateOptions: React.FC = ({ id="phash-task" checked={options.phashes ?? 
false} headingID="dialogs.scene_gen.phash" + tooltipID="dialogs.scene_gen.phash_tooltip" onChange={(v) => setOptions({ phashes: v })} /> diff --git a/ui/v2.5/src/components/Settings/Tasks/ScanOptions.tsx b/ui/v2.5/src/components/Settings/Tasks/ScanOptions.tsx index 2f6588276d4..028a29f090b 100644 --- a/ui/v2.5/src/components/Settings/Tasks/ScanOptions.tsx +++ b/ui/v2.5/src/components/Settings/Tasks/ScanOptions.tsx @@ -53,6 +53,7 @@ export const ScanOptions: React.FC = ({ setOptions({ scanGenerateSprites: v })} /> diff --git a/ui/v2.5/src/components/Settings/context.tsx b/ui/v2.5/src/components/Settings/context.tsx index cbb353a6bdb..eae1bd4040f 100644 --- a/ui/v2.5/src/components/Settings/context.tsx +++ b/ui/v2.5/src/components/Settings/context.tsx @@ -134,7 +134,7 @@ export const SettingsContext: React.FC = ({ children }) => { setUI(data.configuration.ui); }, [data, error]); - const resetSuccess = useDebounce(() => setUpdateSuccess(undefined), [], 4000); + const resetSuccess = useDebounce(() => setUpdateSuccess(undefined), 4000); const onSuccess = useCallback(() => { setUpdateSuccess(true); @@ -158,7 +158,6 @@ export const SettingsContext: React.FC = ({ children }) => { setSaveError(e); } }, - [updateGeneralConfig, onSuccess], 500 ); @@ -208,7 +207,6 @@ export const SettingsContext: React.FC = ({ children }) => { setSaveError(e); } }, - [updateInterfaceConfig, onSuccess], 500 ); @@ -258,7 +256,6 @@ export const SettingsContext: React.FC = ({ children }) => { setSaveError(e); } }, - [updateDefaultsConfig, onSuccess], 500 ); @@ -308,7 +305,6 @@ export const SettingsContext: React.FC = ({ children }) => { setSaveError(e); } }, - [updateScrapingConfig, onSuccess], 500 ); @@ -342,25 +338,21 @@ export const SettingsContext: React.FC = ({ children }) => { } // saves the configuration if no further changes are made after a half second - const saveDLNAConfig = useDebounce( - async (input: GQL.ConfigDlnaInput) => { - try { - setUpdateSuccess(undefined); - await 
updateDLNAConfig({ - variables: { - input, - }, - }); - - setPendingDLNA(undefined); - onSuccess(); - } catch (e) { - setSaveError(e); - } - }, - [updateDLNAConfig, onSuccess], - 500 - ); + const saveDLNAConfig = useDebounce(async (input: GQL.ConfigDlnaInput) => { + try { + setUpdateSuccess(undefined); + await updateDLNAConfig({ + variables: { + input, + }, + }); + + setPendingDLNA(undefined); + onSuccess(); + } catch (e) { + setSaveError(e); + } + }, 500); useEffect(() => { if (!pendingDLNA) { @@ -392,25 +384,21 @@ export const SettingsContext: React.FC = ({ children }) => { } // saves the configuration if no further changes are made after a half second - const saveUIConfig = useDebounce( - async (input: IUIConfig) => { - try { - setUpdateSuccess(undefined); - await updateUIConfig({ - variables: { - input, - }, - }); - - setPendingUI(undefined); - onSuccess(); - } catch (e) { - setSaveError(e); - } - }, - [updateUIConfig, onSuccess], - 500 - ); + const saveUIConfig = useDebounce(async (input: IUIConfig) => { + try { + setUpdateSuccess(undefined); + await updateUIConfig({ + variables: { + input, + }, + }); + + setPendingUI(undefined); + onSuccess(); + } catch (e) { + setSaveError(e); + } + }, 500); useEffect(() => { if (!pendingUI) { diff --git a/ui/v2.5/src/components/Shared/DurationInput.tsx b/ui/v2.5/src/components/Shared/DurationInput.tsx index 0e346acd759..b0d396df143 100644 --- a/ui/v2.5/src/components/Shared/DurationInput.tsx +++ b/ui/v2.5/src/components/Shared/DurationInput.tsx @@ -3,67 +3,58 @@ import { faChevronUp, faClock, } from "@fortawesome/free-solid-svg-icons"; -import React, { useState, useEffect } from "react"; +import React, { useState } from "react"; import { Button, ButtonGroup, InputGroup, Form } from "react-bootstrap"; import { Icon } from "./Icon"; import DurationUtils from "src/utils/duration"; interface IProps { disabled?: boolean; - numericValue: number | undefined; - mandatory?: boolean; - onValueChange( - valueAsNumber: number | 
undefined, - valueAsString?: string - ): void; + value: number | undefined; + setValue(value: number | undefined): void; onReset?(): void; className?: string; placeholder?: string; } -export const DurationInput: React.FC = (props: IProps) => { - const [value, setValue] = useState( - props.numericValue !== undefined - ? DurationUtils.secondsToString(props.numericValue) - : undefined - ); +export const DurationInput: React.FC = ({ + disabled, + value, + setValue, + onReset, + className, + placeholder, +}) => { + const [tmpValue, setTmpValue] = useState(); - useEffect(() => { - if (props.numericValue !== undefined || props.mandatory) { - setValue(DurationUtils.secondsToString(props.numericValue ?? 0)); - } else { - setValue(undefined); - } - }, [props.numericValue, props.mandatory]); + function onChange(e: React.ChangeEvent) { + setTmpValue(e.currentTarget.value); + } - function increment() { - if (value === undefined) { - return; + function onBlur() { + if (tmpValue !== undefined) { + setValue(DurationUtils.stringToSeconds(tmpValue)); + setTmpValue(undefined); } + } - let seconds = DurationUtils.stringToSeconds(value); - seconds += 1; - props.onValueChange(seconds, DurationUtils.secondsToString(seconds)); + function increment() { + setTmpValue(undefined); + setValue((value ?? 0) + 1); } function decrement() { - if (value === undefined) { - return; - } - - let seconds = DurationUtils.stringToSeconds(value); - seconds -= 1; - props.onValueChange(seconds, DurationUtils.secondsToString(seconds)); + setTmpValue(undefined); + setValue((value ?? 0) - 1); } function renderButtons() { - if (!props.disabled) { + if (!disabled) { return ( ); } } + let inputValue = ""; + if (tmpValue !== undefined) { + inputValue = tmpValue; + } else if (value !== undefined) { + inputValue = DurationUtils.secondsToString(value); + } + return ( -
+
) => - setValue(e.currentTarget.value) - } - onBlur={() => { - if (props.mandatory || (value !== undefined && value !== "")) { - props.onValueChange(DurationUtils.stringToSeconds(value), value); - } else { - props.onValueChange(undefined); - } - }} - placeholder={ - !props.disabled - ? props.placeholder - ? `${props.placeholder} (hh:mm:ss)` - : "hh:mm:ss" - : undefined - } + disabled={disabled} + value={inputValue} + onChange={onChange} + onBlur={onBlur} + placeholder={placeholder ? `${placeholder} (hh:mm:ss)` : "hh:mm:ss"} /> {maybeRenderReset()} diff --git a/ui/v2.5/src/components/Shared/FilterSelect.tsx b/ui/v2.5/src/components/Shared/FilterSelect.tsx new file mode 100644 index 00000000000..3489f1851d4 --- /dev/null +++ b/ui/v2.5/src/components/Shared/FilterSelect.tsx @@ -0,0 +1,253 @@ +import React, { useMemo, useState } from "react"; +import { + OnChangeValue, + StylesConfig, + GroupBase, + OptionsOrGroups, + Options, +} from "react-select"; +import AsyncSelect from "react-select/async"; +import AsyncCreatableSelect, { + AsyncCreatableProps, +} from "react-select/async-creatable"; + +import { useToast } from "src/hooks/Toast"; +import { useDebounce } from "src/hooks/debounce"; + +interface IHasID { + id: string; +} + +export type Option = { value: string; object: T }; + +interface ISelectProps + extends AsyncCreatableProps, IsMulti, GroupBase>> { + selectedOptions?: OnChangeValue, IsMulti>; + creatable?: boolean; + isLoading?: boolean; + isDisabled?: boolean; + placeholder?: string; + showDropdown?: boolean; + groupHeader?: string; + noOptionsMessageText?: string | null; +} + +interface IFilterSelectProps + extends Pick< + ISelectProps, + | "selectedOptions" + | "isLoading" + | "isMulti" + | "components" + | "placeholder" + | "closeMenuOnSelect" + > {} + +const getSelectedItems = ( + selectedItems: OnChangeValue, boolean> +) => { + if (Array.isArray(selectedItems)) { + return selectedItems; + } else if (selectedItems) { + return [selectedItems]; + } else { + 
return []; + } +}; + +const SelectComponent = ( + props: ISelectProps +) => { + const { + selectedOptions, + isLoading, + isDisabled = false, + creatable = false, + components, + placeholder, + showDropdown = true, + noOptionsMessageText: noOptionsMessage = "None", + } = props; + + const styles: StylesConfig, IsMulti> = { + option: (base) => ({ + ...base, + color: "#000", + }), + container: (base, state) => ({ + ...base, + zIndex: state.isFocused ? 10 : base.zIndex, + }), + multiValueRemove: (base, state) => ({ + ...base, + color: state.isFocused ? base.color : "#333333", + }), + }; + + const componentProps = { + ...props, + styles, + defaultOptions: true, + value: selectedOptions, + className: "react-select", + classNamePrefix: "react-select", + noOptionsMessage: () => noOptionsMessage, + placeholder: isDisabled ? "" : placeholder, + components: { + ...components, + IndicatorSeparator: () => null, + ...((!showDropdown || isDisabled) && { DropdownIndicator: () => null }), + ...(isDisabled && { MultiValueRemove: () => null }), + }, + }; + + return creatable ? 
( + + ) : ( + + ); +}; + +export interface IFilterValueProps { + values?: T[]; + onSelect?: (item: T[]) => void; +} + +export interface IFilterProps { + noSelectionString?: string; + className?: string; + isMulti?: boolean; + isClearable?: boolean; + isDisabled?: boolean; + creatable?: boolean; + menuPortalTarget?: HTMLElement | null; +} + +export interface IFilterComponentProps extends IFilterProps { + loadOptions: (inputValue: string) => Promise[]>; + onCreate?: ( + name: string + ) => Promise<{ value: string; item: T; message: string }>; + getNamedObject: (id: string, name: string) => T; + isValidNewOption: (inputValue: string, options: T[]) => boolean; +} + +export const FilterSelectComponent = < + T extends IHasID, + IsMulti extends boolean +>( + props: IFilterValueProps & + IFilterComponentProps & + IFilterSelectProps +) => { + const { + values, + isMulti, + onSelect, + isValidNewOption, + getNamedObject, + loadOptions, + } = props; + const [loading, setLoading] = useState(false); + const Toast = useToast(); + + const selectedOptions = useMemo(() => { + if (isMulti && values) { + return values.map( + (value) => + ({ + object: value, + value: value.id, + } as Option) + ) as unknown as OnChangeValue, IsMulti>; + } + + if (values?.length) { + return { + object: values[0], + value: values[0].id, + } as OnChangeValue, IsMulti>; + } + }, [values, isMulti]); + + const onChange = (selectedItems: OnChangeValue, boolean>) => { + const selected = getSelectedItems(selectedItems); + + onSelect?.(selected.map((item) => item.object)); + }; + + const onCreate = async (name: string) => { + try { + setLoading(true); + const { value, item: newItem, message } = await props.onCreate!(name); + const newItemOption = { + object: newItem, + value, + } as Option; + if (!isMulti) { + onChange(newItemOption); + } else { + const o = (selectedOptions ?? 
[]) as Option[]; + onChange([...o, newItemOption]); + } + + setLoading(false); + Toast.success({ + content: ( + + {message}: {name} + + ), + }); + } catch (e) { + Toast.error(e); + } + }; + + const getNewOptionData = ( + inputValue: string, + optionLabel: React.ReactNode + ) => { + return { + value: "", + object: getNamedObject("", optionLabel as string), + }; + }; + + const validNewOption = ( + inputValue: string, + value: Options>, + options: OptionsOrGroups, GroupBase>> + ) => { + return isValidNewOption( + inputValue, + (options as Options>).map((o) => o.object) + ); + }; + + const debounceDelay = 100; + const debounceLoadOptions = useDebounce((inputValue, callback) => { + loadOptions(inputValue).then(callback); + }, debounceDelay); + + return ( + + {...props} + loadOptions={debounceLoadOptions} + isLoading={props.isLoading || loading} + onChange={onChange} + selectedOptions={selectedOptions} + onCreateOption={props.creatable ? onCreate : undefined} + getNewOptionData={getNewOptionData} + isValidNewOption={validNewOption} + /> + ); +}; + +export interface IFilterIDProps { + ids?: string[]; + onSelect?: (item: T[]) => void; +} diff --git a/ui/v2.5/src/components/Shared/FolderSelect/FolderSelect.tsx b/ui/v2.5/src/components/Shared/FolderSelect/FolderSelect.tsx index 4c7bdafa36f..50b7e45b805 100644 --- a/ui/v2.5/src/components/Shared/FolderSelect/FolderSelect.tsx +++ b/ui/v2.5/src/components/Shared/FolderSelect/FolderSelect.tsx @@ -5,7 +5,7 @@ import { Icon } from "../Icon"; import { LoadingIndicator } from "../LoadingIndicator"; import { useDirectory } from "src/core/StashService"; import { faEllipsis, faTimes } from "@fortawesome/free-solid-svg-icons"; -import { useDebouncedSetState } from "src/hooks/debounce"; +import { useDebounce } from "src/hooks/debounce"; interface IProps { currentDirectory: string; @@ -44,7 +44,7 @@ export const FolderSelect: React.FC = ({ (error && hideError ? 
[] : defaultDirectoriesOrEmpty) : defaultDirectoriesOrEmpty; - const debouncedSetDirectory = useDebouncedSetState(setDirectory, 250); + const debouncedSetDirectory = useDebounce(setDirectory, 250); useEffect(() => { if (currentDirectory !== directory) { diff --git a/ui/v2.5/src/components/Shared/PerformerPopoverButton.tsx b/ui/v2.5/src/components/Shared/PerformerPopoverButton.tsx index 9d0cfb6fea5..0f98f732b63 100644 --- a/ui/v2.5/src/components/Shared/PerformerPopoverButton.tsx +++ b/ui/v2.5/src/components/Shared/PerformerPopoverButton.tsx @@ -6,7 +6,7 @@ import * as GQL from "src/core/generated-graphql"; import { sortPerformers } from "src/core/performers"; import { HoverPopover } from "./HoverPopover"; import { Icon } from "./Icon"; -import { TagLink } from "./TagLink"; +import { PerformerLink } from "./TagLink"; interface IProps { performers: Partial[]; @@ -26,7 +26,11 @@ export const PerformerPopoverButton: React.FC = ({ performers }) => { src={performer.image_path ?? ""} /> - +
)); diff --git a/ui/v2.5/src/components/Shared/ScrapeDialog.tsx b/ui/v2.5/src/components/Shared/ScrapeDialog/ScrapeDialog.tsx similarity index 84% rename from ui/v2.5/src/components/Shared/ScrapeDialog.tsx rename to ui/v2.5/src/components/Shared/ScrapeDialog/ScrapeDialog.tsx index 425419ab031..8796aab0254 100644 --- a/ui/v2.5/src/components/Shared/ScrapeDialog.tsx +++ b/ui/v2.5/src/components/Shared/ScrapeDialog/ScrapeDialog.tsx @@ -8,10 +8,9 @@ import { FormControl, Badge, } from "react-bootstrap"; -import { CollapseButton } from "./CollapseButton"; -import { Icon } from "./Icon"; -import { ModalComponent } from "./Modal"; -import isEqual from "lodash-es/isEqual"; +import { CollapseButton } from "../CollapseButton"; +import { Icon } from "../Icon"; +import { ModalComponent } from "../Modal"; import clone from "lodash-es/clone"; import { FormattedMessage, useIntl } from "react-intl"; import { @@ -21,78 +20,10 @@ import { faTimes, } from "@fortawesome/free-solid-svg-icons"; import { getCountryByISO } from "src/utils/country"; -import { CountrySelect } from "./CountrySelect"; -import { StringListInput } from "./StringListInput"; -import { ImageSelector } from "./ImageSelector"; - -export class ScrapeResult { - public newValue?: T; - public originalValue?: T; - public scraped: boolean = false; - public useNewValue: boolean = false; - - public constructor( - originalValue?: T | null, - newValue?: T | null, - useNewValue?: boolean - ) { - this.originalValue = originalValue ?? undefined; - this.newValue = newValue ?? undefined; - // NOTE: this means that zero values are treated as null - // this is incorrect for numbers and booleans, but correct for strings - const hasNewValue = !!this.newValue; - - const valuesEqual = isEqual(originalValue, newValue); - this.useNewValue = useNewValue ?? 
(hasNewValue && !valuesEqual); - this.scraped = hasNewValue && !valuesEqual; - } - - public setOriginalValue(value?: T) { - this.originalValue = value; - this.newValue = value; - } - - public cloneWithValue(value?: T) { - const ret = clone(this); - - ret.newValue = value; - ret.useNewValue = !isEqual(ret.newValue, ret.originalValue); - - // #2691 - if we're setting the value, assume it should be treated as - // scraped - ret.scraped = true; - - return ret; - } - - public getNewValue() { - if (this.useNewValue) { - return this.newValue; - } - } -} - -// for types where !!value is a valid value (boolean and number) -export class ZeroableScrapeResult extends ScrapeResult { - public constructor( - originalValue?: T | null, - newValue?: T | null, - useNewValue?: boolean - ) { - super(originalValue, newValue, useNewValue); - - const hasNewValue = this.newValue !== undefined; - - const valuesEqual = isEqual(originalValue, newValue); - this.useNewValue = useNewValue ?? (hasNewValue && !valuesEqual); - this.scraped = hasNewValue && !valuesEqual; - } -} - -// eslint-disable-next-line @typescript-eslint/no-explicit-any -export function hasScrapedValues(values: ScrapeResult[]) { - return values.some((r) => r.scraped); -} +import { CountrySelect } from "../CountrySelect"; +import { StringListInput } from "../StringListInput"; +import { ImageSelector } from "../ImageSelector"; +import { ScrapeResult } from "./scrapeResult"; export interface IHasName { name: string | undefined; diff --git a/ui/v2.5/src/components/Shared/ScrapeDialog/ScrapedObjectsRow.tsx b/ui/v2.5/src/components/Shared/ScrapeDialog/ScrapedObjectsRow.tsx new file mode 100644 index 00000000000..606821c789d --- /dev/null +++ b/ui/v2.5/src/components/Shared/ScrapeDialog/ScrapedObjectsRow.tsx @@ -0,0 +1,269 @@ +import React, { useMemo } from "react"; +import * as GQL from "src/core/generated-graphql"; +import { + MovieSelect, + TagSelect, + StudioSelect, +} from "src/components/Shared/Select"; +import { + 
ScrapeDialogRow, + IHasName, +} from "src/components/Shared/ScrapeDialog/ScrapeDialog"; +import { PerformerSelect } from "src/components/Performers/PerformerSelect"; +import { ScrapeResult } from "src/components/Shared/ScrapeDialog/scrapeResult"; + +interface IScrapedStudioRow { + title: string; + result: ScrapeResult; + onChange: (value: ScrapeResult) => void; + newStudio?: GQL.ScrapedStudio; + onCreateNew?: (value: GQL.ScrapedStudio) => void; +} + +export const ScrapedStudioRow: React.FC = ({ + title, + result, + onChange, + newStudio, + onCreateNew, +}) => { + function renderScrapedStudio( + scrapeResult: ScrapeResult, + isNew?: boolean, + onChangeFn?: (value: string) => void + ) { + const resultValue = isNew + ? scrapeResult.newValue + : scrapeResult.originalValue; + const value = resultValue ? [resultValue] : []; + + return ( + { + if (onChangeFn) { + onChangeFn(items[0]?.id); + } + }} + ids={value} + /> + ); + } + + return ( + renderScrapedStudio(result)} + renderNewField={() => + renderScrapedStudio(result, true, (value) => + onChange(result.cloneWithValue(value)) + ) + } + onChange={onChange} + newValues={newStudio ? 
[newStudio] : undefined} + onCreateNew={() => { + if (onCreateNew && newStudio) onCreateNew(newStudio); + }} + /> + ); +}; + +interface IScrapedObjectsRow { + title: string; + result: ScrapeResult; + onChange: (value: ScrapeResult) => void; + newObjects?: T[]; + onCreateNew?: (value: T) => void; + renderObjects: ( + result: ScrapeResult, + isNew?: boolean, + onChange?: (value: R[]) => void + ) => JSX.Element; +} + +export const ScrapedObjectsRow = ( + props: IScrapedObjectsRow +) => { + const { title, result, onChange, newObjects, onCreateNew, renderObjects } = + props; + + return ( + renderObjects(result)} + renderNewField={() => + renderObjects(result, true, (value) => + onChange(result.cloneWithValue(value)) + ) + } + onChange={onChange} + newValues={newObjects} + onCreateNew={(i) => { + if (onCreateNew) onCreateNew(newObjects![i]); + }} + /> + ); +}; + +type IScrapedObjectRowImpl = Omit< + IScrapedObjectsRow, + "renderObjects" +>; + +export const ScrapedPerformersRow: React.FC< + IScrapedObjectRowImpl +> = ({ title, result, onChange, newObjects, onCreateNew }) => { + const performersCopy = useMemo(() => { + return ( + newObjects?.map((p) => { + const name: string = p.name ?? ""; + return { ...p, name }; + }) ?? [] + ); + }, [newObjects]); + + function renderScrapedPerformers( + scrapeResult: ScrapeResult, + isNew?: boolean, + onChangeFn?: (value: GQL.ScrapedPerformer[]) => void + ) { + const resultValue = isNew + ? scrapeResult.newValue + : scrapeResult.originalValue; + const value = resultValue ?? []; + + const selectValue = value.map((p) => { + const alias_list: string[] = []; + return { + id: p.stored_id ?? "", + name: p.name ?? 
"", + alias_list, + }; + }); + + return ( + { + if (onChangeFn) { + onChangeFn(items); + } + }} + values={selectValue} + /> + ); + } + + type PerformerType = GQL.ScrapedPerformer & { + name: string; + }; + + return ( + + title={title} + result={result} + renderObjects={renderScrapedPerformers} + onChange={onChange} + newObjects={performersCopy} + onCreateNew={onCreateNew} + /> + ); +}; + +export const ScrapedMoviesRow: React.FC< + IScrapedObjectRowImpl +> = ({ title, result, onChange, newObjects, onCreateNew }) => { + const moviesCopy = useMemo(() => { + return ( + newObjects?.map((p) => { + const name: string = p.name ?? ""; + return { ...p, name }; + }) ?? [] + ); + }, [newObjects]); + + type MovieType = GQL.ScrapedMovie & { + name: string; + }; + + function renderScrapedMovies( + scrapeResult: ScrapeResult, + isNew?: boolean, + onChangeFn?: (value: string[]) => void + ) { + const resultValue = isNew + ? scrapeResult.newValue + : scrapeResult.originalValue; + const value = resultValue ?? []; + + return ( + { + if (onChangeFn) { + onChangeFn(items.map((i) => i.id)); + } + }} + ids={value} + /> + ); + } + + return ( + + title={title} + result={result} + renderObjects={renderScrapedMovies} + onChange={onChange} + newObjects={moviesCopy} + onCreateNew={onCreateNew} + /> + ); +}; + +export const ScrapedTagsRow: React.FC< + IScrapedObjectRowImpl +> = ({ title, result, onChange, newObjects, onCreateNew }) => { + function renderScrapedTags( + scrapeResult: ScrapeResult, + isNew?: boolean, + onChangeFn?: (value: string[]) => void + ) { + const resultValue = isNew + ? scrapeResult.newValue + : scrapeResult.originalValue; + const value = resultValue ?? 
[]; + + return ( + { + if (onChangeFn) { + onChangeFn(items.map((i) => i.id)); + } + }} + ids={value} + /> + ); + } + + return ( + + title={title} + result={result} + renderObjects={renderScrapedTags} + onChange={onChange} + newObjects={newObjects} + onCreateNew={onCreateNew} + /> + ); +}; diff --git a/ui/v2.5/src/components/Shared/ScrapeDialog/createObjects.ts b/ui/v2.5/src/components/Shared/ScrapeDialog/createObjects.ts new file mode 100644 index 00000000000..89f62845d2c --- /dev/null +++ b/ui/v2.5/src/components/Shared/ScrapeDialog/createObjects.ts @@ -0,0 +1,192 @@ +import { useToast } from "src/hooks/Toast"; +import * as GQL from "src/core/generated-graphql"; +import { + useMovieCreate, + usePerformerCreate, + useStudioCreate, + useTagCreate, +} from "src/core/StashService"; +import { ScrapeResult } from "./scrapeResult"; +import { useIntl } from "react-intl"; +import { scrapedPerformerToCreateInput } from "src/core/performers"; +import { scrapedMovieToCreateInput } from "src/core/movies"; + +function useCreateObject( + entityTypeID: string, + createFunc: (o: T) => Promise +) { + const Toast = useToast(); + const intl = useIntl(); + + async function createNewObject(o: T) { + try { + await createFunc(o); + + Toast.success({ + content: intl.formatMessage( + { id: "toast.created_entity" }, + { + entity: intl + .formatMessage({ id: entityTypeID }) + .toLocaleLowerCase(), + } + ), + }); + } catch (e) { + Toast.error(e); + } + } + + return createNewObject; +} + +interface IUseCreateNewStudioProps { + scrapeResult: ScrapeResult; + setScrapeResult: (scrapeResult: ScrapeResult) => void; + setNewObject: (newObject: GQL.ScrapedStudio | undefined) => void; +} + +export function useCreateScrapedStudio(props: IUseCreateNewStudioProps) { + const [createStudio] = useStudioCreate(); + + const { scrapeResult, setScrapeResult, setNewObject } = props; + + async function createNewStudio(toCreate: GQL.ScrapedStudio) { + const result = await createStudio({ + variables: { + input: { 
+ name: toCreate.name, + url: toCreate.url, + }, + }, + }); + + // set the new studio as the value + setScrapeResult(scrapeResult.cloneWithValue(result.data!.studioCreate!.id)); + setNewObject(undefined); + } + + return useCreateObject("studio", createNewStudio); +} + +interface IUseCreateNewPerformerProps { + scrapeResult: ScrapeResult; + setScrapeResult: (scrapeResult: ScrapeResult) => void; + newObjects: GQL.ScrapedPerformer[]; + setNewObjects: (newObject: GQL.ScrapedPerformer[]) => void; +} + +export function useCreateScrapedPerformer(props: IUseCreateNewPerformerProps) { + const [createPerformer] = usePerformerCreate(); + + const { scrapeResult, setScrapeResult, newObjects, setNewObjects } = props; + + async function createNewPerformer(toCreate: GQL.ScrapedPerformer) { + const input = scrapedPerformerToCreateInput(toCreate); + + const result = await createPerformer({ + variables: { input }, + }); + + const newValue = [...(scrapeResult.newValue ?? [])]; + if (result.data?.performerCreate) + newValue.push({ + stored_id: result.data.performerCreate.id, + name: result.data.performerCreate.name, + }); + + // add the new performer to the new performers value + const performerClone = scrapeResult.cloneWithValue(newValue); + setScrapeResult(performerClone); + + // remove the performer from the list + const newPerformersClone = newObjects.concat(); + const pIndex = newPerformersClone.findIndex( + (p) => p.name === toCreate.name + ); + if (pIndex === -1) throw new Error("Could not find performer to remove"); + + newPerformersClone.splice(pIndex, 1); + + setNewObjects(newPerformersClone); + } + + return useCreateObject("performer", createNewPerformer); +} + +interface IUseCreateNewObjectIDListProps< + T extends { name?: string | undefined | null } +> { + scrapeResult: ScrapeResult; + setScrapeResult: (scrapeResult: ScrapeResult) => void; + newObjects: T[]; + setNewObjects: (newObject: T[]) => void; +} + +function useCreateNewObjectIDList< + T extends { name?: string | 
undefined | null } +>( + entityTypeID: string, + props: IUseCreateNewObjectIDListProps, + createObject: (toCreate: T) => Promise +) { + const { scrapeResult, setScrapeResult, newObjects, setNewObjects } = props; + + async function createNewObject(toCreate: T) { + const newID = await createObject(toCreate); + + // add the new object to the new objects value + const newResult = scrapeResult.cloneWithValue(scrapeResult.newValue); + if (!newResult.newValue) { + newResult.newValue = []; + } + newResult.newValue.push(newID); + setScrapeResult(newResult); + + // remove the object from the list + const newObjectsClone = newObjects.concat(); + const pIndex = newObjectsClone.findIndex((p) => p.name === toCreate.name); + if (pIndex === -1) throw new Error("Could not find object to remove"); + newObjectsClone.splice(pIndex, 1); + + setNewObjects(newObjectsClone); + } + + return useCreateObject(entityTypeID, createNewObject); +} + +export function useCreateScrapedMovie( + props: IUseCreateNewObjectIDListProps +) { + const [createMovie] = useMovieCreate(); + + async function createNewMovie(toCreate: GQL.ScrapedMovie) { + const movieInput = scrapedMovieToCreateInput(toCreate); + const result = await createMovie({ + variables: { input: movieInput }, + }); + + return result.data?.movieCreate?.id ?? ""; + } + + return useCreateNewObjectIDList("movie", props, createNewMovie); +} + +export function useCreateScrapedTag( + props: IUseCreateNewObjectIDListProps +) { + const [createTag] = useTagCreate(); + + async function createNewTag(toCreate: GQL.ScrapedTag) { + const tagInput: GQL.TagCreateInput = { name: toCreate.name ?? "" }; + const result = await createTag({ + variables: { + input: tagInput, + }, + }); + + return result.data?.tagCreate?.id ?? 
""; + } + + return useCreateNewObjectIDList("tag", props, createNewTag); +} diff --git a/ui/v2.5/src/components/Shared/ScrapeDialog/scrapeResult.ts b/ui/v2.5/src/components/Shared/ScrapeDialog/scrapeResult.ts new file mode 100644 index 00000000000..195541fe8a2 --- /dev/null +++ b/ui/v2.5/src/components/Shared/ScrapeDialog/scrapeResult.ts @@ -0,0 +1,112 @@ +import lodashIsEqual from "lodash-es/isEqual"; +import clone from "lodash-es/clone"; +import { IHasStoredID } from "src/utils/data"; + +export class ScrapeResult { + public newValue?: T; + public originalValue?: T; + public scraped: boolean = false; + public useNewValue: boolean = false; + private isEqual: ( + v1: T | undefined | null, + v2: T | undefined | null + ) => boolean; + + public constructor( + originalValue?: T | null, + newValue?: T | null, + useNewValue?: boolean, + isEqual: ( + v1: T | undefined | null, + v2: T | undefined | null + ) => boolean = lodashIsEqual + ) { + this.originalValue = originalValue ?? undefined; + this.newValue = newValue ?? undefined; + this.isEqual = isEqual; + + // NOTE: this means that zero values are treated as null + // this is incorrect for numbers and booleans, but correct for strings + const hasNewValue = !!this.newValue; + + const valuesEqual = isEqual(originalValue, newValue); + this.useNewValue = useNewValue ?? 
(hasNewValue && !valuesEqual); + this.scraped = hasNewValue && !valuesEqual; + } + + public setOriginalValue(value?: T) { + this.originalValue = value; + this.newValue = value; + } + + public cloneWithValue(value?: T) { + const ret = clone(this); + + ret.newValue = value; + ret.useNewValue = !this.isEqual(ret.newValue, ret.originalValue); + + // #2691 - if we're setting the value, assume it should be treated as + // scraped + ret.scraped = true; + + return ret; + } + + public getNewValue() { + if (this.useNewValue) { + return this.newValue; + } + } +} + +// for types where !!value is a valid value (boolean and number) +export class ZeroableScrapeResult extends ScrapeResult { + public constructor( + originalValue?: T | null, + newValue?: T | null, + useNewValue?: boolean, + isEqual: ( + v1: T | undefined | null, + v2: T | undefined | null + ) => boolean = lodashIsEqual + ) { + super(originalValue, newValue, useNewValue, isEqual); + + const hasNewValue = this.newValue !== undefined; + + const valuesEqual = isEqual(originalValue, newValue); + this.useNewValue = useNewValue ?? 
(hasNewValue && !valuesEqual); + this.scraped = hasNewValue && !valuesEqual; + } +} + +function storedIDsEqual( + o1: T[] | undefined | null, + o2: T[] | undefined | null +) { + return ( + !!o1 && + !!o2 && + o1.length === o2.length && + o1.every((o) => { + return o2.find((oo) => o.stored_id === oo.stored_id); + }) + ); +} + +export class ObjectListScrapeResult< + T extends IHasStoredID +> extends ScrapeResult { + public constructor( + originalValue?: T[] | null, + newValue?: T[] | null, + useNewValue?: boolean + ) { + super(originalValue, newValue, useNewValue, storedIDsEqual); + } +} + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +export function hasScrapedValues(values: ScrapeResult[]) { + return values.some((r) => r.scraped); +} diff --git a/ui/v2.5/src/components/Shared/Select.tsx b/ui/v2.5/src/components/Shared/Select.tsx index e9f14a96f15..59b16bead02 100644 --- a/ui/v2.5/src/components/Shared/Select.tsx +++ b/ui/v2.5/src/components/Shared/Select.tsx @@ -17,11 +17,9 @@ import { useAllTagsForFilter, useAllMoviesForFilter, useAllStudiosForFilter, - useAllPerformersForFilter, useMarkerStrings, useTagCreate, useStudioCreate, - usePerformerCreate, useMovieCreate, } from "src/core/StashService"; import { useToast } from "src/hooks/Toast"; @@ -32,8 +30,9 @@ import { objectTitle } from "src/core/files"; import { galleryTitle } from "src/core/galleries"; import { TagPopover } from "../Tags/TagPopover"; import { defaultMaxOptionsShown, IUIConfig } from "src/core/config"; -import { useDebouncedSetState } from "src/hooks/debounce"; +import { useDebounce } from "src/hooks/debounce"; import { Placement } from "react-bootstrap/esm/Overlay"; +import { PerformerIDSelect } from "../Performers/PerformerSelect"; export type SelectObject = { id: string; @@ -47,12 +46,9 @@ interface ITypeProps { | "galleries" | "performers" | "studios" - | "parent_studios" | "tags" - | "sceneTags" - | "performerTags" - | "parentTags" - | "childTags" + | "scene_tags" + | 
"performer_tags" | "movies"; } interface IFilterProps { @@ -358,7 +354,7 @@ export const GallerySelect: React.FC = (props) => { value: g.id, })); - const onInputChange = useDebouncedSetState(setQuery, 500); + const onInputChange = useDebounce(setQuery, 500); const onChange = (selectedItems: OnChangeValue) => { const selected = getSelectedItems(selectedItems); @@ -409,7 +405,7 @@ export const SceneSelect: React.FC = (props) => { value: s.id, })); - const onInputChange = useDebouncedSetState(setQuery, 500); + const onInputChange = useDebounce(setQuery, 500); const onChange = (selectedItems: OnChangeValue) => { const selected = getSelectedItems(selectedItems); @@ -459,7 +455,7 @@ export const ImageSelect: React.FC = (props) => { value: s.id, })); - const onInputChange = useDebouncedSetState(setQuery, 500); + const onInputChange = useDebounce(setQuery, 500); const onChange = (selectedItems: OnChangeValue) => { const selected = getSelectedItems(selectedItems); @@ -535,152 +531,7 @@ export const MarkerTitleSuggest: React.FC = (props) => { }; export const PerformerSelect: React.FC = (props) => { - const [performerAliases, setPerformerAliases] = useState< - Record - >({}); - const [performerDisambiguations, setPerformerDisambiguations] = useState< - Record - >({}); - const [allAliases, setAllAliases] = useState([]); - const { data, loading } = useAllPerformersForFilter(); - const [createPerformer] = usePerformerCreate(); - - const { configuration } = React.useContext(ConfigurationContext); - const intl = useIntl(); - const defaultCreatable = - !configuration?.interface.disableDropdownCreate.performer ?? true; - - const performers = useMemo( - () => data?.allPerformers ?? 
[], - [data?.allPerformers] - ); - - useEffect(() => { - // build the tag aliases map - const newAliases: Record = {}; - const newDisambiguations: Record = {}; - const newAll: string[] = []; - performers.forEach((t) => { - if (t.alias_list.length) { - newAliases[t.id] = t.alias_list; - } - newAll.push(...t.alias_list); - if (t.disambiguation) { - newDisambiguations[t.id] = t.disambiguation; - } - }); - setPerformerAliases(newAliases); - setAllAliases(newAll); - setPerformerDisambiguations(newDisambiguations); - }, [performers]); - - const PerformerOption: React.FC> = ( - optionProps - ) => { - const { inputValue } = optionProps.selectProps; - - let thisOptionProps = optionProps; - - let { label } = optionProps.data; - const id = Number(optionProps.data.value); - - if (id && performerDisambiguations[id]) { - label += ` (${performerDisambiguations[id]})`; - } - - if ( - inputValue && - !optionProps.label.toLowerCase().includes(inputValue.toLowerCase()) - ) { - // must be alias - label += " (alias)"; - } - - if (label != optionProps.data.label) { - thisOptionProps = { - ...optionProps, - children: label, - }; - } - - return ; - }; - - const filterOption = (option: Option, rawInput: string): boolean => { - if (!rawInput) { - return true; - } - - const input = rawInput.toLowerCase(); - const optionVal = option.label.toLowerCase(); - - if (optionVal.includes(input)) { - return true; - } - - // search for performer aliases - const aliases = performerAliases[option.value]; - return aliases && aliases.some((a) => a.toLowerCase().includes(input)); - }; - - const isValidNewOption = ( - inputValue: string, - value: Options