diff --git a/.mockery.yml b/.mockery.yml new file mode 100644 index 00000000000..0dcb289e3d7 --- /dev/null +++ b/.mockery.yml @@ -0,0 +1,4 @@ +dir: ./pkg/models +name: ".*ReaderWriter" +outpkg: mocks +output: ./pkg/models/mocks diff --git a/Makefile b/Makefile index 70154258553..0f6a43cdffb 100644 --- a/Makefile +++ b/Makefile @@ -319,7 +319,7 @@ it: # generates test mocks .PHONY: generate-test-mocks generate-test-mocks: - go run github.com/vektra/mockery/v2 --dir ./pkg/models --name '.*ReaderWriter' --outpkg mocks --output ./pkg/models/mocks + go run github.com/vektra/mockery/v2 # runs server # sets the config file to use the local dev config diff --git a/README.md b/README.md index 4debcfa9169..9cc62f9b970 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,4 @@ # Stash -https://stashapp.cc [![Build](https://github.com/stashapp/stash/actions/workflows/build.yml/badge.svg?branch=develop&event=push)](https://github.com/stashapp/stash/actions/workflows/build.yml) [![Docker pulls](https://img.shields.io/docker/pulls/stashapp/stash.svg)](https://hub.docker.com/r/stashapp/stash 'DockerHub') @@ -21,7 +20,7 @@ https://stashapp.cc You can [watch a SFW demo video](https://vimeo.com/545323354) to see it in action. -For further information you can [read the in-app manual](ui/v2.5/src/docs/en). +For further information you can consult the [documentation](https://docs.stashapp.cc) or [read the in-app manual](ui/v2.5/src/docs/en). # Installing Stash @@ -49,10 +48,10 @@ Many community-maintained scrapers are available for download from [CommunityScr [StashDB](http://stashdb.org) is the canonical instance of our open source metadata API, [stash-box](https://github.com/stashapp/stash-box). 
# Translation -[![Translate](https://translate.stashapp.cc/widgets/stash/-/stash-desktop-client/svg-badge.svg)](https://translate.stashapp.cc/engage/stash/) +[![Translate](https://hosted.weblate.org/widget/stashapp/stash/svg-badge.svg)](https://hosted.weblate.org/engage/stashapp/) 🇧🇷 🇨🇳 🇩🇰 🇳🇱 🇬🇧 🇪🇪 🇫🇮 🇫🇷 🇩🇪 🇮🇹 🇯🇵 🇰🇷 🇵🇱 🇷🇺 🇪🇸 🇸🇪 🇹🇼 🇹🇷 -Stash is available in 25 languages (so far!) and it could be in your language too. If you want to help us translate Stash into your language, you can make an account at [translate.stashapp.cc](https://translate.stashapp.cc/projects/stash/stash-desktop-client/) to get started contributing new languages or improving existing ones. Thanks! +Stash is available in 25 languages (so far!) and it could be in your language too. We use Weblate to coordinate community translations. If you want to help us translate Stash into your language, you can make an account at [Stash's Weblate](https://hosted.weblate.org/projects/stashapp/stash/) to get started contributing new languages or improving existing ones. Thanks! # Support (FAQ) diff --git a/cmd/phasher/main.go b/cmd/phasher/main.go index f4648b74e2f..99a7491929f 100644 --- a/cmd/phasher/main.go +++ b/cmd/phasher/main.go @@ -8,8 +8,8 @@ import ( flag "github.com/spf13/pflag" "github.com/stashapp/stash/pkg/ffmpeg" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/hash/videophash" + "github.com/stashapp/stash/pkg/models" ) func customUsage() { @@ -28,8 +28,8 @@ func printPhash(ff *ffmpeg.FFMpeg, ffp ffmpeg.FFProbe, inputfile string, quiet * // videoFile.Path (from BaseFile) // videoFile.Duration // The rest of the struct isn't needed. 
- vf := &file.VideoFile{ - BaseFile: &file.BaseFile{Path: inputfile}, + vf := &models.VideoFile{ + BaseFile: &models.BaseFile{Path: inputfile}, Duration: ffvideoFile.FileDuration, } diff --git a/gqlgen.yml b/gqlgen.yml index 2439ebc7ca0..ec9feab24a6 100644 --- a/gqlgen.yml +++ b/gqlgen.yml @@ -23,6 +23,12 @@ autobind: models: # Scalars + ID: + model: + - github.com/99designs/gqlgen/graphql.ID + - github.com/99designs/gqlgen/graphql.IntID + - github.com/stashapp/stash/pkg/models.FileID + - github.com/stashapp/stash/pkg/models.FolderID Int64: model: github.com/99designs/gqlgen/graphql.Int64 Timestamp: @@ -33,6 +39,30 @@ models: fields: title: resolver: true + # override models, from internal/api/models.go + BaseFile: + model: github.com/stashapp/stash/internal/api.BaseFile + GalleryFile: + model: github.com/stashapp/stash/internal/api.GalleryFile + fields: + # override fingerprint field + fingerprints: + fieldName: FingerprintSlice + VideoFile: + fields: + # override fingerprint field + fingerprints: + fieldName: FingerprintSlice + # override float fields - #1572 + duration: + fieldName: DurationFinite + frame_rate: + fieldName: FrameRateFinite + ImageFile: + fields: + # override fingerprint field + fingerprints: + fieldName: FingerprintSlice # autobind on config causes generation issues BlobsStorageType: model: github.com/stashapp/stash/internal/manager/config.BlobsStorageType @@ -118,4 +148,6 @@ models: model: github.com/stashapp/stash/internal/identify.MetadataOptions ScraperSourceInput: model: github.com/stashapp/stash/pkg/scraper.Source + SavedFindFilterType: + model: github.com/stashapp/stash/pkg/models.FindFilterType diff --git a/graphql/documents/data/filter.graphql b/graphql/documents/data/filter.graphql index 4c6236668ad..1ced5a44198 100644 --- a/graphql/documents/data/filter.graphql +++ b/graphql/documents/data/filter.graphql @@ -2,5 +2,13 @@ fragment SavedFilterData on SavedFilter { id mode name - filter + find_filter { + q + page + per_page + sort + 
direction + } + object_filter + ui_options } diff --git a/graphql/documents/data/performer-slim.graphql b/graphql/documents/data/performer-slim.graphql index 65019b98b52..5fbd1a2eb6d 100644 --- a/graphql/documents/data/performer-slim.graphql +++ b/graphql/documents/data/performer-slim.graphql @@ -34,3 +34,10 @@ fragment SlimPerformerData on Performer { death_date weight } + +fragment SelectPerformerData on Performer { + id + name + disambiguation + alias_list +} diff --git a/graphql/documents/queries/misc.graphql b/graphql/documents/queries/misc.graphql index 791392fb00d..61354be534d 100644 --- a/graphql/documents/queries/misc.graphql +++ b/graphql/documents/queries/misc.graphql @@ -6,15 +6,6 @@ query MarkerStrings($q: String, $sort: String) { } } -query AllPerformersForFilter { - allPerformers { - id - name - disambiguation - alias_list - } -} - query AllStudiosForFilter { allStudios { id diff --git a/graphql/documents/queries/performer.graphql b/graphql/documents/queries/performer.graphql index cc25752ac4a..3c3f689c326 100644 --- a/graphql/documents/queries/performer.graphql +++ b/graphql/documents/queries/performer.graphql @@ -1,8 +1,13 @@ query FindPerformers( $filter: FindFilterType $performer_filter: PerformerFilterType + $performer_ids: [Int!] ) { - findPerformers(filter: $filter, performer_filter: $performer_filter) { + findPerformers( + filter: $filter + performer_filter: $performer_filter + performer_ids: $performer_ids + ) { count performers { ...PerformerData @@ -15,3 +20,20 @@ query FindPerformer($id: ID!) { ...PerformerData } } + +query FindPerformersForSelect( + $filter: FindFilterType + $performer_filter: PerformerFilterType + $performer_ids: [Int!] 
+) { + findPerformers( + filter: $filter + performer_filter: $performer_filter + performer_ids: $performer_ids + ) { + count + performers { + ...SelectPerformerData + } + } +} diff --git a/graphql/schema/schema.graphql b/graphql/schema/schema.graphql index 52f97adab31..4c011ad0db2 100644 --- a/graphql/schema/schema.graphql +++ b/graphql/schema/schema.graphql @@ -60,6 +60,7 @@ type Query { findPerformers( performer_filter: PerformerFilterType filter: FindFilterType + performer_ids: [Int!] ): FindPerformersResultType! "Find a studio by ID" @@ -223,11 +224,13 @@ type Query { allSceneMarkers: [SceneMarker!]! allImages: [Image!]! allGalleries: [Gallery!]! - allPerformers: [Performer!]! allStudios: [Studio!]! allMovies: [Movie!]! allTags: [Tag!]! + # @deprecated + allPerformers: [Performer!]! + # Get everything with minimal metadata # Version diff --git a/graphql/schema/types/filters.graphql b/graphql/schema/types/filters.graphql index 13165fba875..f0b19026471 100644 --- a/graphql/schema/types/filters.graphql +++ b/graphql/schema/types/filters.graphql @@ -12,6 +12,17 @@ input FindFilterType { direction: SortDirectionEnum } +type SavedFindFilterType { + q: String + page: Int + """ + use per_page = -1 to indicate all results. Defaults to 25. + """ + per_page: Int + sort: String + direction: SortDirectionEnum +} + enum ResolutionEnum { "144p" VERY_LOW @@ -604,6 +615,13 @@ type SavedFilter { name: String! "JSON-encoded filter string" filter: String! + @deprecated(reason: "use find_filter and object_filter instead") + find_filter: SavedFindFilterType + # maps to any of the AnyFilterInput types + # using a generic Map instead of creating and maintaining match types for inputs + object_filter: Map + # generic map for ui options + ui_options: Map } input SaveFilterInput { @@ -611,8 +629,10 @@ input SaveFilterInput { id: ID mode: FilterMode! name: String! - "JSON-encoded filter string" - filter: String! 
+ find_filter: FindFilterType + object_filter: Map + # generic map for ui options + ui_options: Map } input DestroyFilterInput { @@ -621,6 +641,9 @@ input DestroyFilterInput { input SetDefaultFilterInput { mode: FilterMode! - "JSON-encoded filter string - null to clear" - filter: String + "null to clear" + find_filter: FindFilterType + object_filter: Map + # generic map for ui options + ui_options: Map } diff --git a/internal/api/loaders/dataloaders.go b/internal/api/loaders/dataloaders.go index 30b865632b9..d98c663a146 100644 --- a/internal/api/loaders/dataloaders.go +++ b/internal/api/loaders/dataloaders.go @@ -1,14 +1,14 @@ -//go:generate go run -mod=vendor github.com/vektah/dataloaden SceneLoader int *github.com/stashapp/stash/pkg/models.Scene -//go:generate go run -mod=vendor github.com/vektah/dataloaden GalleryLoader int *github.com/stashapp/stash/pkg/models.Gallery -//go:generate go run -mod=vendor github.com/vektah/dataloaden ImageLoader int *github.com/stashapp/stash/pkg/models.Image -//go:generate go run -mod=vendor github.com/vektah/dataloaden PerformerLoader int *github.com/stashapp/stash/pkg/models.Performer -//go:generate go run -mod=vendor github.com/vektah/dataloaden StudioLoader int *github.com/stashapp/stash/pkg/models.Studio -//go:generate go run -mod=vendor github.com/vektah/dataloaden TagLoader int *github.com/stashapp/stash/pkg/models.Tag -//go:generate go run -mod=vendor github.com/vektah/dataloaden MovieLoader int *github.com/stashapp/stash/pkg/models.Movie -//go:generate go run -mod=vendor github.com/vektah/dataloaden FileLoader github.com/stashapp/stash/pkg/file.ID github.com/stashapp/stash/pkg/file.File -//go:generate go run -mod=vendor github.com/vektah/dataloaden SceneFileIDsLoader int []github.com/stashapp/stash/pkg/file.ID -//go:generate go run -mod=vendor github.com/vektah/dataloaden ImageFileIDsLoader int []github.com/stashapp/stash/pkg/file.ID -//go:generate go run -mod=vendor github.com/vektah/dataloaden GalleryFileIDsLoader int 
[]github.com/stashapp/stash/pkg/file.ID +//go:generate go run github.com/vektah/dataloaden SceneLoader int *github.com/stashapp/stash/pkg/models.Scene +//go:generate go run github.com/vektah/dataloaden GalleryLoader int *github.com/stashapp/stash/pkg/models.Gallery +//go:generate go run github.com/vektah/dataloaden ImageLoader int *github.com/stashapp/stash/pkg/models.Image +//go:generate go run github.com/vektah/dataloaden PerformerLoader int *github.com/stashapp/stash/pkg/models.Performer +//go:generate go run github.com/vektah/dataloaden StudioLoader int *github.com/stashapp/stash/pkg/models.Studio +//go:generate go run github.com/vektah/dataloaden TagLoader int *github.com/stashapp/stash/pkg/models.Tag +//go:generate go run github.com/vektah/dataloaden MovieLoader int *github.com/stashapp/stash/pkg/models.Movie +//go:generate go run github.com/vektah/dataloaden FileLoader github.com/stashapp/stash/pkg/models.FileID github.com/stashapp/stash/pkg/models.File +//go:generate go run github.com/vektah/dataloaden SceneFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID +//go:generate go run github.com/vektah/dataloaden ImageFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID +//go:generate go run github.com/vektah/dataloaden GalleryFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID package loaders @@ -18,7 +18,6 @@ import ( "time" "github.com/stashapp/stash/internal/manager" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/txn" ) @@ -216,8 +215,8 @@ func (m Middleware) fetchMovies(ctx context.Context) func(keys []int) ([]*models } } -func (m Middleware) fetchFiles(ctx context.Context) func(keys []file.ID) ([]file.File, []error) { - return func(keys []file.ID) (ret []file.File, errs []error) { +func (m Middleware) fetchFiles(ctx context.Context) func(keys []models.FileID) ([]models.File, []error) { + return func(keys []models.FileID) (ret []models.File, errs []error) { err 
:= m.withTxn(ctx, func(ctx context.Context) error { var err error ret, err = m.Repository.File.Find(ctx, keys...) @@ -227,8 +226,8 @@ func (m Middleware) fetchFiles(ctx context.Context) func(keys []file.ID) ([]file } } -func (m Middleware) fetchScenesFileIDs(ctx context.Context) func(keys []int) ([][]file.ID, []error) { - return func(keys []int) (ret [][]file.ID, errs []error) { +func (m Middleware) fetchScenesFileIDs(ctx context.Context) func(keys []int) ([][]models.FileID, []error) { + return func(keys []int) (ret [][]models.FileID, errs []error) { err := m.withTxn(ctx, func(ctx context.Context) error { var err error ret, err = m.Repository.Scene.GetManyFileIDs(ctx, keys) @@ -238,8 +237,8 @@ func (m Middleware) fetchScenesFileIDs(ctx context.Context) func(keys []int) ([] } } -func (m Middleware) fetchImagesFileIDs(ctx context.Context) func(keys []int) ([][]file.ID, []error) { - return func(keys []int) (ret [][]file.ID, errs []error) { +func (m Middleware) fetchImagesFileIDs(ctx context.Context) func(keys []int) ([][]models.FileID, []error) { + return func(keys []int) (ret [][]models.FileID, errs []error) { err := m.withTxn(ctx, func(ctx context.Context) error { var err error ret, err = m.Repository.Image.GetManyFileIDs(ctx, keys) @@ -249,8 +248,8 @@ func (m Middleware) fetchImagesFileIDs(ctx context.Context) func(keys []int) ([] } } -func (m Middleware) fetchGalleriesFileIDs(ctx context.Context) func(keys []int) ([][]file.ID, []error) { - return func(keys []int) (ret [][]file.ID, errs []error) { +func (m Middleware) fetchGalleriesFileIDs(ctx context.Context) func(keys []int) ([][]models.FileID, []error) { + return func(keys []int) (ret [][]models.FileID, errs []error) { err := m.withTxn(ctx, func(ctx context.Context) error { var err error ret, err = m.Repository.Gallery.GetManyFileIDs(ctx, keys) diff --git a/internal/api/loaders/fileloader_gen.go b/internal/api/loaders/fileloader_gen.go index 348dcbb7f09..6289e7a50cf 100644 --- 
a/internal/api/loaders/fileloader_gen.go +++ b/internal/api/loaders/fileloader_gen.go @@ -6,13 +6,13 @@ import ( "sync" "time" - "github.com/stashapp/stash/pkg/file" + "github.com/stashapp/stash/pkg/models" ) // FileLoaderConfig captures the config to create a new FileLoader type FileLoaderConfig struct { // Fetch is a method that provides the data for the loader - Fetch func(keys []file.ID) ([]file.File, []error) + Fetch func(keys []models.FileID) ([]models.File, []error) // Wait is how long wait before sending a batch Wait time.Duration @@ -33,7 +33,7 @@ func NewFileLoader(config FileLoaderConfig) *FileLoader { // FileLoader batches and caches requests type FileLoader struct { // this method provides the data for the loader - fetch func(keys []file.ID) ([]file.File, []error) + fetch func(keys []models.FileID) ([]models.File, []error) // how long to done before sending a batch wait time.Duration @@ -44,7 +44,7 @@ type FileLoader struct { // INTERNAL // lazily created cache - cache map[file.ID]file.File + cache map[models.FileID]models.File // the current batch. keys will continue to be collected until timeout is hit, // then everything will be sent to the fetch method and out to the listeners @@ -55,26 +55,26 @@ type FileLoader struct { } type fileLoaderBatch struct { - keys []file.ID - data []file.File + keys []models.FileID + data []models.File error []error closing bool done chan struct{} } // Load a File by key, batching and caching will be applied automatically -func (l *FileLoader) Load(key file.ID) (file.File, error) { +func (l *FileLoader) Load(key models.FileID) (models.File, error) { return l.LoadThunk(key)() } // LoadThunk returns a function that when called will block waiting for a File. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. 
-func (l *FileLoader) LoadThunk(key file.ID) func() (file.File, error) { +func (l *FileLoader) LoadThunk(key models.FileID) func() (models.File, error) { l.mu.Lock() if it, ok := l.cache[key]; ok { l.mu.Unlock() - return func() (file.File, error) { + return func() (models.File, error) { return it, nil } } @@ -85,10 +85,10 @@ func (l *FileLoader) LoadThunk(key file.ID) func() (file.File, error) { pos := batch.keyIndex(l, key) l.mu.Unlock() - return func() (file.File, error) { + return func() (models.File, error) { <-batch.done - var data file.File + var data models.File if pos < len(batch.data) { data = batch.data[pos] } @@ -113,14 +113,14 @@ func (l *FileLoader) LoadThunk(key file.ID) func() (file.File, error) { // LoadAll fetches many keys at once. It will be broken into appropriate sized // sub batches depending on how the loader is configured -func (l *FileLoader) LoadAll(keys []file.ID) ([]file.File, []error) { - results := make([]func() (file.File, error), len(keys)) +func (l *FileLoader) LoadAll(keys []models.FileID) ([]models.File, []error) { + results := make([]func() (models.File, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - files := make([]file.File, len(keys)) + files := make([]models.File, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { files[i], errors[i] = thunk() @@ -131,13 +131,13 @@ func (l *FileLoader) LoadAll(keys []file.ID) ([]file.File, []error) { // LoadAllThunk returns a function that when called will block waiting for a Files. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. 
-func (l *FileLoader) LoadAllThunk(keys []file.ID) func() ([]file.File, []error) { - results := make([]func() (file.File, error), len(keys)) +func (l *FileLoader) LoadAllThunk(keys []models.FileID) func() ([]models.File, []error) { + results := make([]func() (models.File, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - return func() ([]file.File, []error) { - files := make([]file.File, len(keys)) + return func() ([]models.File, []error) { + files := make([]models.File, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { files[i], errors[i] = thunk() @@ -149,7 +149,7 @@ func (l *FileLoader) LoadAllThunk(keys []file.ID) func() ([]file.File, []error) // Prime the cache with the provided key and value. If the key already exists, no change is made // and false is returned. // (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) -func (l *FileLoader) Prime(key file.ID, value file.File) bool { +func (l *FileLoader) Prime(key models.FileID, value models.File) bool { l.mu.Lock() var found bool if _, found = l.cache[key]; !found { @@ -160,22 +160,22 @@ func (l *FileLoader) Prime(key file.ID, value file.File) bool { } // Clear the value at key from the cache, if it exists -func (l *FileLoader) Clear(key file.ID) { +func (l *FileLoader) Clear(key models.FileID) { l.mu.Lock() delete(l.cache, key) l.mu.Unlock() } -func (l *FileLoader) unsafeSet(key file.ID, value file.File) { +func (l *FileLoader) unsafeSet(key models.FileID, value models.File) { if l.cache == nil { - l.cache = map[file.ID]file.File{} + l.cache = map[models.FileID]models.File{} } l.cache[key] = value } // keyIndex will return the location of the key in the batch, if its not found // it will add the key to the batch -func (b *fileLoaderBatch) keyIndex(l *FileLoader, key file.ID) int { +func (b *fileLoaderBatch) keyIndex(l *FileLoader, key models.FileID) int { for i, existingKey := range b.keys { if key == 
existingKey { return i diff --git a/internal/api/loaders/galleryfileidsloader_gen.go b/internal/api/loaders/galleryfileidsloader_gen.go index 808cfbf0fa9..e3c53903683 100644 --- a/internal/api/loaders/galleryfileidsloader_gen.go +++ b/internal/api/loaders/galleryfileidsloader_gen.go @@ -6,13 +6,13 @@ import ( "sync" "time" - "github.com/stashapp/stash/pkg/file" + "github.com/stashapp/stash/pkg/models" ) // GalleryFileIDsLoaderConfig captures the config to create a new GalleryFileIDsLoader type GalleryFileIDsLoaderConfig struct { // Fetch is a method that provides the data for the loader - Fetch func(keys []int) ([][]file.ID, []error) + Fetch func(keys []int) ([][]models.FileID, []error) // Wait is how long wait before sending a batch Wait time.Duration @@ -33,7 +33,7 @@ func NewGalleryFileIDsLoader(config GalleryFileIDsLoaderConfig) *GalleryFileIDsL // GalleryFileIDsLoader batches and caches requests type GalleryFileIDsLoader struct { // this method provides the data for the loader - fetch func(keys []int) ([][]file.ID, []error) + fetch func(keys []int) ([][]models.FileID, []error) // how long to done before sending a batch wait time.Duration @@ -44,7 +44,7 @@ type GalleryFileIDsLoader struct { // INTERNAL // lazily created cache - cache map[int][]file.ID + cache map[int][]models.FileID // the current batch. 
keys will continue to be collected until timeout is hit, // then everything will be sent to the fetch method and out to the listeners @@ -56,25 +56,25 @@ type GalleryFileIDsLoader struct { type galleryFileIDsLoaderBatch struct { keys []int - data [][]file.ID + data [][]models.FileID error []error closing bool done chan struct{} } -// Load a ID by key, batching and caching will be applied automatically -func (l *GalleryFileIDsLoader) Load(key int) ([]file.ID, error) { +// Load a FileID by key, batching and caching will be applied automatically +func (l *GalleryFileIDsLoader) Load(key int) ([]models.FileID, error) { return l.LoadThunk(key)() } -// LoadThunk returns a function that when called will block waiting for a ID. +// LoadThunk returns a function that when called will block waiting for a FileID. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *GalleryFileIDsLoader) LoadThunk(key int) func() ([]file.ID, error) { +func (l *GalleryFileIDsLoader) LoadThunk(key int) func() ([]models.FileID, error) { l.mu.Lock() if it, ok := l.cache[key]; ok { l.mu.Unlock() - return func() ([]file.ID, error) { + return func() ([]models.FileID, error) { return it, nil } } @@ -85,10 +85,10 @@ func (l *GalleryFileIDsLoader) LoadThunk(key int) func() ([]file.ID, error) { pos := batch.keyIndex(l, key) l.mu.Unlock() - return func() ([]file.ID, error) { + return func() ([]models.FileID, error) { <-batch.done - var data []file.ID + var data []models.FileID if pos < len(batch.data) { data = batch.data[pos] } @@ -113,49 +113,49 @@ func (l *GalleryFileIDsLoader) LoadThunk(key int) func() ([]file.ID, error) { // LoadAll fetches many keys at once. 
It will be broken into appropriate sized // sub batches depending on how the loader is configured -func (l *GalleryFileIDsLoader) LoadAll(keys []int) ([][]file.ID, []error) { - results := make([]func() ([]file.ID, error), len(keys)) +func (l *GalleryFileIDsLoader) LoadAll(keys []int) ([][]models.FileID, []error) { + results := make([]func() ([]models.FileID, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - iDs := make([][]file.ID, len(keys)) + fileIDs := make([][]models.FileID, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { - iDs[i], errors[i] = thunk() + fileIDs[i], errors[i] = thunk() } - return iDs, errors + return fileIDs, errors } -// LoadAllThunk returns a function that when called will block waiting for a IDs. +// LoadAllThunk returns a function that when called will block waiting for a FileIDs. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *GalleryFileIDsLoader) LoadAllThunk(keys []int) func() ([][]file.ID, []error) { - results := make([]func() ([]file.ID, error), len(keys)) +func (l *GalleryFileIDsLoader) LoadAllThunk(keys []int) func() ([][]models.FileID, []error) { + results := make([]func() ([]models.FileID, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - return func() ([][]file.ID, []error) { - iDs := make([][]file.ID, len(keys)) + return func() ([][]models.FileID, []error) { + fileIDs := make([][]models.FileID, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { - iDs[i], errors[i] = thunk() + fileIDs[i], errors[i] = thunk() } - return iDs, errors + return fileIDs, errors } } // Prime the cache with the provided key and value. If the key already exists, no change is made // and false is returned. // (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) 
-func (l *GalleryFileIDsLoader) Prime(key int, value []file.ID) bool { +func (l *GalleryFileIDsLoader) Prime(key int, value []models.FileID) bool { l.mu.Lock() var found bool if _, found = l.cache[key]; !found { // make a copy when writing to the cache, its easy to pass a pointer in from a loop var // and end up with the whole cache pointing to the same value. - cpy := make([]file.ID, len(value)) + cpy := make([]models.FileID, len(value)) copy(cpy, value) l.unsafeSet(key, cpy) } @@ -170,9 +170,9 @@ func (l *GalleryFileIDsLoader) Clear(key int) { l.mu.Unlock() } -func (l *GalleryFileIDsLoader) unsafeSet(key int, value []file.ID) { +func (l *GalleryFileIDsLoader) unsafeSet(key int, value []models.FileID) { if l.cache == nil { - l.cache = map[int][]file.ID{} + l.cache = map[int][]models.FileID{} } l.cache[key] = value } diff --git a/internal/api/loaders/imagefileidsloader_gen.go b/internal/api/loaders/imagefileidsloader_gen.go index 7e633d8ef01..e19d458ad81 100644 --- a/internal/api/loaders/imagefileidsloader_gen.go +++ b/internal/api/loaders/imagefileidsloader_gen.go @@ -6,13 +6,13 @@ import ( "sync" "time" - "github.com/stashapp/stash/pkg/file" + "github.com/stashapp/stash/pkg/models" ) // ImageFileIDsLoaderConfig captures the config to create a new ImageFileIDsLoader type ImageFileIDsLoaderConfig struct { // Fetch is a method that provides the data for the loader - Fetch func(keys []int) ([][]file.ID, []error) + Fetch func(keys []int) ([][]models.FileID, []error) // Wait is how long wait before sending a batch Wait time.Duration @@ -33,7 +33,7 @@ func NewImageFileIDsLoader(config ImageFileIDsLoaderConfig) *ImageFileIDsLoader // ImageFileIDsLoader batches and caches requests type ImageFileIDsLoader struct { // this method provides the data for the loader - fetch func(keys []int) ([][]file.ID, []error) + fetch func(keys []int) ([][]models.FileID, []error) // how long to done before sending a batch wait time.Duration @@ -44,7 +44,7 @@ type ImageFileIDsLoader struct { 
// INTERNAL // lazily created cache - cache map[int][]file.ID + cache map[int][]models.FileID // the current batch. keys will continue to be collected until timeout is hit, // then everything will be sent to the fetch method and out to the listeners @@ -56,25 +56,25 @@ type ImageFileIDsLoader struct { type imageFileIDsLoaderBatch struct { keys []int - data [][]file.ID + data [][]models.FileID error []error closing bool done chan struct{} } -// Load a ID by key, batching and caching will be applied automatically -func (l *ImageFileIDsLoader) Load(key int) ([]file.ID, error) { +// Load a FileID by key, batching and caching will be applied automatically +func (l *ImageFileIDsLoader) Load(key int) ([]models.FileID, error) { return l.LoadThunk(key)() } -// LoadThunk returns a function that when called will block waiting for a ID. +// LoadThunk returns a function that when called will block waiting for a FileID. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *ImageFileIDsLoader) LoadThunk(key int) func() ([]file.ID, error) { +func (l *ImageFileIDsLoader) LoadThunk(key int) func() ([]models.FileID, error) { l.mu.Lock() if it, ok := l.cache[key]; ok { l.mu.Unlock() - return func() ([]file.ID, error) { + return func() ([]models.FileID, error) { return it, nil } } @@ -85,10 +85,10 @@ func (l *ImageFileIDsLoader) LoadThunk(key int) func() ([]file.ID, error) { pos := batch.keyIndex(l, key) l.mu.Unlock() - return func() ([]file.ID, error) { + return func() ([]models.FileID, error) { <-batch.done - var data []file.ID + var data []models.FileID if pos < len(batch.data) { data = batch.data[pos] } @@ -113,49 +113,49 @@ func (l *ImageFileIDsLoader) LoadThunk(key int) func() ([]file.ID, error) { // LoadAll fetches many keys at once. 
It will be broken into appropriate sized // sub batches depending on how the loader is configured -func (l *ImageFileIDsLoader) LoadAll(keys []int) ([][]file.ID, []error) { - results := make([]func() ([]file.ID, error), len(keys)) +func (l *ImageFileIDsLoader) LoadAll(keys []int) ([][]models.FileID, []error) { + results := make([]func() ([]models.FileID, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - iDs := make([][]file.ID, len(keys)) + fileIDs := make([][]models.FileID, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { - iDs[i], errors[i] = thunk() + fileIDs[i], errors[i] = thunk() } - return iDs, errors + return fileIDs, errors } -// LoadAllThunk returns a function that when called will block waiting for a IDs. +// LoadAllThunk returns a function that when called will block waiting for a FileIDs. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *ImageFileIDsLoader) LoadAllThunk(keys []int) func() ([][]file.ID, []error) { - results := make([]func() ([]file.ID, error), len(keys)) +func (l *ImageFileIDsLoader) LoadAllThunk(keys []int) func() ([][]models.FileID, []error) { + results := make([]func() ([]models.FileID, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - return func() ([][]file.ID, []error) { - iDs := make([][]file.ID, len(keys)) + return func() ([][]models.FileID, []error) { + fileIDs := make([][]models.FileID, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { - iDs[i], errors[i] = thunk() + fileIDs[i], errors[i] = thunk() } - return iDs, errors + return fileIDs, errors } } // Prime the cache with the provided key and value. If the key already exists, no change is made // and false is returned. // (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) 
-func (l *ImageFileIDsLoader) Prime(key int, value []file.ID) bool { +func (l *ImageFileIDsLoader) Prime(key int, value []models.FileID) bool { l.mu.Lock() var found bool if _, found = l.cache[key]; !found { // make a copy when writing to the cache, its easy to pass a pointer in from a loop var // and end up with the whole cache pointing to the same value. - cpy := make([]file.ID, len(value)) + cpy := make([]models.FileID, len(value)) copy(cpy, value) l.unsafeSet(key, cpy) } @@ -170,9 +170,9 @@ func (l *ImageFileIDsLoader) Clear(key int) { l.mu.Unlock() } -func (l *ImageFileIDsLoader) unsafeSet(key int, value []file.ID) { +func (l *ImageFileIDsLoader) unsafeSet(key int, value []models.FileID) { if l.cache == nil { - l.cache = map[int][]file.ID{} + l.cache = map[int][]models.FileID{} } l.cache[key] = value } diff --git a/internal/api/loaders/scenefileidsloader_gen.go b/internal/api/loaders/scenefileidsloader_gen.go index 663be2c6fd3..16e1690c4cd 100644 --- a/internal/api/loaders/scenefileidsloader_gen.go +++ b/internal/api/loaders/scenefileidsloader_gen.go @@ -6,13 +6,13 @@ import ( "sync" "time" - "github.com/stashapp/stash/pkg/file" + "github.com/stashapp/stash/pkg/models" ) // SceneFileIDsLoaderConfig captures the config to create a new SceneFileIDsLoader type SceneFileIDsLoaderConfig struct { // Fetch is a method that provides the data for the loader - Fetch func(keys []int) ([][]file.ID, []error) + Fetch func(keys []int) ([][]models.FileID, []error) // Wait is how long wait before sending a batch Wait time.Duration @@ -33,7 +33,7 @@ func NewSceneFileIDsLoader(config SceneFileIDsLoaderConfig) *SceneFileIDsLoader // SceneFileIDsLoader batches and caches requests type SceneFileIDsLoader struct { // this method provides the data for the loader - fetch func(keys []int) ([][]file.ID, []error) + fetch func(keys []int) ([][]models.FileID, []error) // how long to done before sending a batch wait time.Duration @@ -44,7 +44,7 @@ type SceneFileIDsLoader struct { // 
INTERNAL // lazily created cache - cache map[int][]file.ID + cache map[int][]models.FileID // the current batch. keys will continue to be collected until timeout is hit, // then everything will be sent to the fetch method and out to the listeners @@ -56,25 +56,25 @@ type SceneFileIDsLoader struct { type sceneFileIDsLoaderBatch struct { keys []int - data [][]file.ID + data [][]models.FileID error []error closing bool done chan struct{} } -// Load a ID by key, batching and caching will be applied automatically -func (l *SceneFileIDsLoader) Load(key int) ([]file.ID, error) { +// Load a FileID by key, batching and caching will be applied automatically +func (l *SceneFileIDsLoader) Load(key int) ([]models.FileID, error) { return l.LoadThunk(key)() } -// LoadThunk returns a function that when called will block waiting for a ID. +// LoadThunk returns a function that when called will block waiting for a FileID. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *SceneFileIDsLoader) LoadThunk(key int) func() ([]file.ID, error) { +func (l *SceneFileIDsLoader) LoadThunk(key int) func() ([]models.FileID, error) { l.mu.Lock() if it, ok := l.cache[key]; ok { l.mu.Unlock() - return func() ([]file.ID, error) { + return func() ([]models.FileID, error) { return it, nil } } @@ -85,10 +85,10 @@ func (l *SceneFileIDsLoader) LoadThunk(key int) func() ([]file.ID, error) { pos := batch.keyIndex(l, key) l.mu.Unlock() - return func() ([]file.ID, error) { + return func() ([]models.FileID, error) { <-batch.done - var data []file.ID + var data []models.FileID if pos < len(batch.data) { data = batch.data[pos] } @@ -113,49 +113,49 @@ func (l *SceneFileIDsLoader) LoadThunk(key int) func() ([]file.ID, error) { // LoadAll fetches many keys at once. 
It will be broken into appropriate sized // sub batches depending on how the loader is configured -func (l *SceneFileIDsLoader) LoadAll(keys []int) ([][]file.ID, []error) { - results := make([]func() ([]file.ID, error), len(keys)) +func (l *SceneFileIDsLoader) LoadAll(keys []int) ([][]models.FileID, []error) { + results := make([]func() ([]models.FileID, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - iDs := make([][]file.ID, len(keys)) + fileIDs := make([][]models.FileID, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { - iDs[i], errors[i] = thunk() + fileIDs[i], errors[i] = thunk() } - return iDs, errors + return fileIDs, errors } -// LoadAllThunk returns a function that when called will block waiting for a IDs. +// LoadAllThunk returns a function that when called will block waiting for a FileIDs. // This method should be used if you want one goroutine to make requests to many // different data loaders without blocking until the thunk is called. -func (l *SceneFileIDsLoader) LoadAllThunk(keys []int) func() ([][]file.ID, []error) { - results := make([]func() ([]file.ID, error), len(keys)) +func (l *SceneFileIDsLoader) LoadAllThunk(keys []int) func() ([][]models.FileID, []error) { + results := make([]func() ([]models.FileID, error), len(keys)) for i, key := range keys { results[i] = l.LoadThunk(key) } - return func() ([][]file.ID, []error) { - iDs := make([][]file.ID, len(keys)) + return func() ([][]models.FileID, []error) { + fileIDs := make([][]models.FileID, len(keys)) errors := make([]error, len(keys)) for i, thunk := range results { - iDs[i], errors[i] = thunk() + fileIDs[i], errors[i] = thunk() } - return iDs, errors + return fileIDs, errors } } // Prime the cache with the provided key and value. If the key already exists, no change is made // and false is returned. // (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) 
-func (l *SceneFileIDsLoader) Prime(key int, value []file.ID) bool { +func (l *SceneFileIDsLoader) Prime(key int, value []models.FileID) bool { l.mu.Lock() var found bool if _, found = l.cache[key]; !found { // make a copy when writing to the cache, its easy to pass a pointer in from a loop var // and end up with the whole cache pointing to the same value. - cpy := make([]file.ID, len(value)) + cpy := make([]models.FileID, len(value)) copy(cpy, value) l.unsafeSet(key, cpy) } @@ -170,9 +170,9 @@ func (l *SceneFileIDsLoader) Clear(key int) { l.mu.Unlock() } -func (l *SceneFileIDsLoader) unsafeSet(key int, value []file.ID) { +func (l *SceneFileIDsLoader) unsafeSet(key int, value []models.FileID) { if l.cache == nil { - l.cache = map[int][]file.ID{} + l.cache = map[int][]models.FileID{} } l.cache[key] = value } diff --git a/internal/api/models.go b/internal/api/models.go index 92713a56e8c..03c20ee4396 100644 --- a/internal/api/models.go +++ b/internal/api/models.go @@ -9,9 +9,16 @@ import ( "github.com/99designs/gqlgen/graphql" "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/utils" ) +type BaseFile interface{} + +type GalleryFile struct { + *models.BaseFile +} + var ErrTimestamp = errors.New("cannot parse Timestamp") func MarshalTimestamp(t time.Time) graphql.Marshaler { diff --git a/internal/api/resolver.go b/internal/api/resolver.go index ff74a4456f7..ea0bd256c22 100644 --- a/internal/api/resolver.go +++ b/internal/api/resolver.go @@ -82,6 +82,9 @@ func (r *Resolver) Subscription() SubscriptionResolver { func (r *Resolver) Tag() TagResolver { return &tagResolver{r} } +func (r *Resolver) SavedFilter() SavedFilterResolver { + return &savedFilterResolver{r} +} type mutationResolver struct{ *Resolver } type queryResolver struct{ *Resolver } @@ -96,6 +99,7 @@ type imageResolver struct{ *Resolver } type studioResolver struct{ *Resolver } type movieResolver struct{ *Resolver } type tagResolver struct{ *Resolver 
} +type savedFilterResolver struct{ *Resolver } func (r *Resolver) withTxn(ctx context.Context, fn func(ctx context.Context) error) error { return txn.WithTxn(ctx, r.txnManager, fn) diff --git a/internal/api/resolver_model_gallery.go b/internal/api/resolver_model_gallery.go index 8157404dcf7..e7c0cd6a04c 100644 --- a/internal/api/resolver_model_gallery.go +++ b/internal/api/resolver_model_gallery.go @@ -2,18 +2,16 @@ package api import ( "context" - "strconv" "time" "github.com/stashapp/stash/internal/api/loaders" "github.com/stashapp/stash/internal/manager/config" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/models" ) -func (r *galleryResolver) getPrimaryFile(ctx context.Context, obj *models.Gallery) (file.File, error) { +func (r *galleryResolver) getPrimaryFile(ctx context.Context, obj *models.Gallery) (models.File, error) { if obj.PrimaryFileID != nil { f, err := loaders.From(ctx).FileByID.Load(*obj.PrimaryFileID) if err != nil { @@ -26,7 +24,7 @@ func (r *galleryResolver) getPrimaryFile(ctx context.Context, obj *models.Galler return nil, nil } -func (r *galleryResolver) getFiles(ctx context.Context, obj *models.Gallery) ([]file.File, error) { +func (r *galleryResolver) getFiles(ctx context.Context, obj *models.Gallery) ([]models.File, error) { fileIDs, err := loaders.From(ctx).GalleryFiles.Load(obj.ID) if err != nil { return nil, err @@ -45,34 +43,20 @@ func (r *galleryResolver) Files(ctx context.Context, obj *models.Gallery) ([]*Ga ret := make([]*GalleryFile, len(files)) for i, f := range files { - base := f.Base() ret[i] = &GalleryFile{ - ID: strconv.Itoa(int(base.ID)), - Path: base.Path, - Basename: base.Basename, - ParentFolderID: strconv.Itoa(int(base.ParentFolderID)), - ModTime: base.ModTime, - Size: base.Size, - CreatedAt: base.CreatedAt, - UpdatedAt: base.UpdatedAt, - Fingerprints: resolveFingerprints(base), - } - - if base.ZipFileID != nil { - zipFileID := strconv.Itoa(int(*base.ZipFileID)) 
- ret[i].ZipFileID = &zipFileID + BaseFile: f.Base(), } } return ret, nil } -func (r *galleryResolver) Folder(ctx context.Context, obj *models.Gallery) (*Folder, error) { +func (r *galleryResolver) Folder(ctx context.Context, obj *models.Gallery) (*models.Folder, error) { if obj.FolderID == nil { return nil, nil } - var ret *file.Folder + var ret *models.Folder if err := r.withReadTxn(ctx, func(ctx context.Context) error { var err error @@ -91,25 +75,7 @@ func (r *galleryResolver) Folder(ctx context.Context, obj *models.Gallery) (*Fol return nil, nil } - rr := &Folder{ - ID: ret.ID.String(), - Path: ret.Path, - ModTime: ret.ModTime, - CreatedAt: ret.CreatedAt, - UpdatedAt: ret.UpdatedAt, - } - - if ret.ParentFolderID != nil { - pfidStr := ret.ParentFolderID.String() - rr.ParentFolderID = &pfidStr - } - - if ret.ZipFileID != nil { - zfidStr := ret.ZipFileID.String() - rr.ZipFileID = &zfidStr - } - - return rr, nil + return ret, nil } func (r *galleryResolver) FileModTime(ctx context.Context, obj *models.Gallery) (*time.Time, error) { diff --git a/internal/api/resolver_model_image.go b/internal/api/resolver_model_image.go index 9bfadafc7a4..246ff8b4450 100644 --- a/internal/api/resolver_model_image.go +++ b/internal/api/resolver_model_image.go @@ -3,57 +3,35 @@ package api import ( "context" "fmt" - "strconv" "time" "github.com/stashapp/stash/internal/api/loaders" "github.com/stashapp/stash/internal/api/urlbuilders" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" ) -func convertImageFile(f *file.ImageFile) *ImageFile { - ret := &ImageFile{ - ID: strconv.Itoa(int(f.ID)), - Path: f.Path, - Basename: f.Basename, - ParentFolderID: strconv.Itoa(int(f.ParentFolderID)), - ModTime: f.ModTime, - Size: f.Size, - Width: f.Width, - Height: f.Height, - CreatedAt: f.CreatedAt, - UpdatedAt: f.UpdatedAt, - Fingerprints: resolveFingerprints(f.Base()), +func convertVisualFile(f models.File) (models.VisualFile, error) { + vf, ok := f.(models.VisualFile) + 
if !ok { + return nil, fmt.Errorf("file %s is not a visual file", f.Base().Path) } - - if f.ZipFileID != nil { - zipFileID := strconv.Itoa(int(*f.ZipFileID)) - ret.ZipFileID = &zipFileID - } - - return ret + return vf, nil } -func (r *imageResolver) getPrimaryFile(ctx context.Context, obj *models.Image) (file.VisualFile, error) { +func (r *imageResolver) getPrimaryFile(ctx context.Context, obj *models.Image) (models.VisualFile, error) { if obj.PrimaryFileID != nil { f, err := loaders.From(ctx).FileByID.Load(*obj.PrimaryFileID) if err != nil { return nil, err } - asFrame, ok := f.(file.VisualFile) - if !ok { - return nil, fmt.Errorf("file %T is not an frame", f) - } - - return asFrame, nil + return convertVisualFile(f) } return nil, nil } -func (r *imageResolver) getFiles(ctx context.Context, obj *models.Image) ([]file.File, error) { +func (r *imageResolver) getFiles(ctx context.Context, obj *models.Image) ([]models.File, error) { fileIDs, err := loaders.From(ctx).ImageFiles.Load(obj.ID) if err != nil { return nil, err @@ -88,30 +66,21 @@ func (r *imageResolver) File(ctx context.Context, obj *models.Image) (*ImageFile }, nil } -func convertVisualFile(f file.File) VisualFile { - switch f := f.(type) { - case *file.ImageFile: - return convertImageFile(f) - case *file.VideoFile: - return convertVideoFile(f) - default: - panic(fmt.Sprintf("unknown file type %T", f)) - } -} - -func (r *imageResolver) VisualFiles(ctx context.Context, obj *models.Image) ([]VisualFile, error) { - fileIDs, err := loaders.From(ctx).ImageFiles.Load(obj.ID) +func (r *imageResolver) VisualFiles(ctx context.Context, obj *models.Image) ([]models.VisualFile, error) { + files, err := r.getFiles(ctx, obj) if err != nil { return nil, err } - files, errs := loaders.From(ctx).FileByID.LoadAll(fileIDs) - ret := make([]VisualFile, len(files)) + ret := make([]models.VisualFile, len(files)) for i, f := range files { - ret[i] = convertVisualFile(f) + ret[i], err = convertVisualFile(f) + if err != nil { + 
return nil, err + } } - return ret, firstError(errs) + return ret, nil } func (r *imageResolver) Date(ctx context.Context, obj *models.Image) (*string, error) { @@ -122,24 +91,22 @@ func (r *imageResolver) Date(ctx context.Context, obj *models.Image) (*string, e return nil, nil } -func (r *imageResolver) Files(ctx context.Context, obj *models.Image) ([]*ImageFile, error) { +func (r *imageResolver) Files(ctx context.Context, obj *models.Image) ([]*models.ImageFile, error) { files, err := r.getFiles(ctx, obj) if err != nil { return nil, err } - var ret []*ImageFile + var ret []*models.ImageFile for _, f := range files { // filter out non-image files - imageFile, ok := f.(*file.ImageFile) + imageFile, ok := f.(*models.ImageFile) if !ok { continue } - thisFile := convertImageFile(imageFile) - - ret = append(ret, thisFile) + ret = append(ret, imageFile) } return ret, nil diff --git a/internal/api/resolver_model_saved_filter.go b/internal/api/resolver_model_saved_filter.go new file mode 100644 index 00000000000..5e1131ab347 --- /dev/null +++ b/internal/api/resolver_model_saved_filter.go @@ -0,0 +1,11 @@ +package api + +import ( + "context" + + "github.com/stashapp/stash/pkg/models" +) + +func (r *savedFilterResolver) Filter(ctx context.Context, obj *models.SavedFilter) (string, error) { + return "", nil +} diff --git a/internal/api/resolver_model_scene.go b/internal/api/resolver_model_scene.go index 9d5b41725ce..27ccaf33b85 100644 --- a/internal/api/resolver_model_scene.go +++ b/internal/api/resolver_model_scene.go @@ -9,50 +9,28 @@ import ( "github.com/stashapp/stash/internal/api/loaders" "github.com/stashapp/stash/internal/api/urlbuilders" "github.com/stashapp/stash/internal/manager" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/utils" ) -func convertVideoFile(f *file.VideoFile) *VideoFile { - ret := &VideoFile{ - ID: strconv.Itoa(int(f.ID)), - Path: f.Path, - Basename: f.Basename, - ParentFolderID: 
strconv.Itoa(int(f.ParentFolderID)), - ModTime: f.ModTime, - Format: f.Format, - Size: f.Size, - Duration: handleFloat64Value(f.Duration), - VideoCodec: f.VideoCodec, - AudioCodec: f.AudioCodec, - Width: f.Width, - Height: f.Height, - FrameRate: handleFloat64Value(f.FrameRate), - BitRate: int(f.BitRate), - CreatedAt: f.CreatedAt, - UpdatedAt: f.UpdatedAt, - Fingerprints: resolveFingerprints(f.Base()), - } - - if f.ZipFileID != nil { - zipFileID := strconv.Itoa(int(*f.ZipFileID)) - ret.ZipFileID = &zipFileID +func convertVideoFile(f models.File) (*models.VideoFile, error) { + vf, ok := f.(*models.VideoFile) + if !ok { + return nil, fmt.Errorf("file %T is not a video file", f) } - - return ret + return vf, nil } -func (r *sceneResolver) getPrimaryFile(ctx context.Context, obj *models.Scene) (*file.VideoFile, error) { +func (r *sceneResolver) getPrimaryFile(ctx context.Context, obj *models.Scene) (*models.VideoFile, error) { if obj.PrimaryFileID != nil { f, err := loaders.From(ctx).FileByID.Load(*obj.PrimaryFileID) if err != nil { return nil, err } - ret, ok := f.(*file.VideoFile) - if !ok { - return nil, fmt.Errorf("file %T is not an image file", f) + ret, err := convertVideoFile(f) + if err != nil { + return nil, err } obj.Files.SetPrimary(ret) @@ -65,26 +43,29 @@ func (r *sceneResolver) getPrimaryFile(ctx context.Context, obj *models.Scene) ( return nil, nil } -func (r *sceneResolver) getFiles(ctx context.Context, obj *models.Scene) ([]*file.VideoFile, error) { +func (r *sceneResolver) getFiles(ctx context.Context, obj *models.Scene) ([]*models.VideoFile, error) { fileIDs, err := loaders.From(ctx).SceneFiles.Load(obj.ID) if err != nil { return nil, err } files, errs := loaders.From(ctx).FileByID.LoadAll(fileIDs) - ret := make([]*file.VideoFile, len(files)) - for i, bf := range files { - f, ok := bf.(*file.VideoFile) - if !ok { - return nil, fmt.Errorf("file %T is not a video file", f) - } + err = firstError(errs) + if err != nil { + return nil, err + } - ret[i] = f 
+ ret := make([]*models.VideoFile, len(files)) + for i, f := range files { + ret[i], err = convertVideoFile(f) + if err != nil { + return nil, err + } } obj.Files.Set(ret) - return ret, firstError(errs) + return ret, nil } func (r *sceneResolver) FileModTime(ctx context.Context, obj *models.Scene) (*time.Time, error) { @@ -132,19 +113,13 @@ func (r *sceneResolver) File(ctx context.Context, obj *models.Scene) (*models.Sc }, nil } -func (r *sceneResolver) Files(ctx context.Context, obj *models.Scene) ([]*VideoFile, error) { +func (r *sceneResolver) Files(ctx context.Context, obj *models.Scene) ([]*models.VideoFile, error) { files, err := r.getFiles(ctx, obj) if err != nil { return nil, err } - ret := make([]*VideoFile, len(files)) - - for i, f := range files { - ret[i] = convertVideoFile(f) - } - - return ret, nil + return files, nil } func (r *sceneResolver) Rating(ctx context.Context, obj *models.Scene) (*int, error) { @@ -159,28 +134,6 @@ func (r *sceneResolver) Rating100(ctx context.Context, obj *models.Scene) (*int, return obj.Rating, nil } -func resolveFingerprints(f *file.BaseFile) []*Fingerprint { - ret := make([]*Fingerprint, len(f.Fingerprints)) - - for i, fp := range f.Fingerprints { - ret[i] = &Fingerprint{ - Type: fp.Type, - Value: formatFingerprint(fp.Fingerprint), - } - } - - return ret -} - -func formatFingerprint(fp interface{}) string { - switch v := fp.(type) { - case int64: - return strconv.FormatUint(uint64(v), 16) - default: - return fmt.Sprintf("%v", fp) - } -} - func (r *sceneResolver) Paths(ctx context.Context, obj *models.Scene) (*ScenePathsType, error) { baseURL, _ := ctx.Value(BaseURLCtxKey).(string) config := manager.GetInstance().Config @@ -352,7 +305,7 @@ func (r *sceneResolver) Phash(ctx context.Context, obj *models.Scene) (*string, return nil, nil } - val := f.Fingerprints.Get(file.FingerprintTypePhash) + val := f.Fingerprints.Get(models.FingerprintTypePhash) if val == nil { return nil, nil } diff --git 
a/internal/api/resolver_mutation_file.go b/internal/api/resolver_mutation_file.go index 0b8b84ea0a2..2fcf66fcf19 100644 --- a/internal/api/resolver_mutation_file.go +++ b/internal/api/resolver_mutation_file.go @@ -8,6 +8,7 @@ import ( "github.com/stashapp/stash/internal/manager" "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/fsutil" + "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/sliceutil/stringslice" ) @@ -19,7 +20,7 @@ func (r *mutationResolver) MoveFiles(ctx context.Context, input MoveFilesInput) mover.RegisterHooks(ctx, r.txnManager) var ( - folder *file.Folder + folder *models.Folder basename string ) @@ -37,7 +38,7 @@ func (r *mutationResolver) MoveFiles(ctx context.Context, input MoveFilesInput) return fmt.Errorf("invalid folder id %s: %w", *input.DestinationFolderID, err) } - folder, err = folderStore.Find(ctx, file.FolderID(folderID)) + folder, err = folderStore.Find(ctx, models.FolderID(folderID)) if err != nil { return fmt.Errorf("finding destination folder: %w", err) } @@ -82,7 +83,7 @@ func (r *mutationResolver) MoveFiles(ctx context.Context, input MoveFilesInput) } for _, fileIDInt := range fileIDs { - fileID := file.ID(fileIDInt) + fileID := models.FileID(fileIDInt) f, err := fileStore.Find(ctx, fileID) if err != nil { return fmt.Errorf("finding file %d: %w", fileID, err) @@ -158,7 +159,7 @@ func (r *mutationResolver) DeleteFiles(ctx context.Context, ids []string) (ret b qb := r.repository.File for _, fileIDInt := range fileIDs { - fileID := file.ID(fileIDInt) + fileID := models.FileID(fileIDInt) f, err := qb.Find(ctx, fileID) if err != nil { return err diff --git a/internal/api/resolver_mutation_gallery.go b/internal/api/resolver_mutation_gallery.go index 368808d2ce6..ebdb94e647c 100644 --- a/internal/api/resolver_mutation_gallery.go +++ b/internal/api/resolver_mutation_gallery.go @@ -199,7 +199,7 @@ func (r *mutationResolver) galleryUpdate(ctx context.Context, input models.Galle return nil, 
fmt.Errorf("converting primary file id: %w", err) } - converted := file.ID(primaryFileID) + converted := models.FileID(primaryFileID) updatedGallery.PrimaryFileID = &converted if err := originalGallery.LoadFiles(ctx, r.repository.Gallery); err != nil { @@ -207,7 +207,7 @@ func (r *mutationResolver) galleryUpdate(ctx context.Context, input models.Galle } // ensure that new primary file is associated with gallery - var f file.File + var f models.File for _, ff := range originalGallery.Files.List() { if ff.Base().ID == converted { f = ff diff --git a/internal/api/resolver_mutation_image.go b/internal/api/resolver_mutation_image.go index 6d5c3a88ab5..6ea58e211f3 100644 --- a/internal/api/resolver_mutation_image.go +++ b/internal/api/resolver_mutation_image.go @@ -123,7 +123,7 @@ func (r *mutationResolver) imageUpdate(ctx context.Context, input ImageUpdateInp return nil, fmt.Errorf("converting primary file id: %w", err) } - converted := file.ID(primaryFileID) + converted := models.FileID(primaryFileID) updatedImage.PrimaryFileID = &converted if err := i.LoadFiles(ctx, r.repository.Image); err != nil { @@ -131,7 +131,7 @@ func (r *mutationResolver) imageUpdate(ctx context.Context, input ImageUpdateInp } // ensure that new primary file is associated with image - var f file.File + var f models.File for _, ff := range i.Files.List() { if ff.Base().ID == converted { f = ff diff --git a/internal/api/resolver_mutation_saved_filter.go b/internal/api/resolver_mutation_saved_filter.go index a0514546cf2..89062227068 100644 --- a/internal/api/resolver_mutation_saved_filter.go +++ b/internal/api/resolver_mutation_saved_filter.go @@ -14,12 +14,6 @@ func (r *mutationResolver) SaveFilter(ctx context.Context, input SaveFilterInput return nil, errors.New("name must be non-empty") } - newFilter := models.SavedFilter{ - Mode: input.Mode, - Name: input.Name, - Filter: input.Filter, - } - var id *int if input.ID != nil { idv, err := strconv.Atoi(*input.ID) @@ -32,17 +26,27 @@ func (r 
*mutationResolver) SaveFilter(ctx context.Context, input SaveFilterInput if err := r.withTxn(ctx, func(ctx context.Context) error { qb := r.repository.SavedFilter + f := models.SavedFilter{ + Mode: input.Mode, + Name: input.Name, + FindFilter: input.FindFilter, + ObjectFilter: input.ObjectFilter, + UIOptions: input.UIOptions, + } + if id == nil { - err = qb.Create(ctx, &newFilter) + err = qb.Create(ctx, &f) + ret = &f } else { - newFilter.ID = *id - err = qb.Update(ctx, &newFilter) + f.ID = *id + err = qb.Update(ctx, &f) + ret = &f } + return err }); err != nil { return nil, err } - ret = &newFilter return ret, err } @@ -65,7 +69,7 @@ func (r *mutationResolver) SetDefaultFilter(ctx context.Context, input SetDefaul if err := r.withTxn(ctx, func(ctx context.Context) error { qb := r.repository.SavedFilter - if input.Filter == nil { + if input.FindFilter == nil && input.ObjectFilter == nil && input.UIOptions == nil { // clearing def, err := qb.FindDefault(ctx, input.Mode) if err != nil { @@ -79,12 +83,12 @@ func (r *mutationResolver) SetDefaultFilter(ctx context.Context, input SetDefaul return nil } - err := qb.SetDefault(ctx, &models.SavedFilter{ - Mode: input.Mode, - Filter: *input.Filter, + return qb.SetDefault(ctx, &models.SavedFilter{ + Mode: input.Mode, + FindFilter: input.FindFilter, + ObjectFilter: input.ObjectFilter, + UIOptions: input.UIOptions, }) - - return err }); err != nil { return false, err } diff --git a/internal/api/resolver_mutation_scene.go b/internal/api/resolver_mutation_scene.go index 1846d554d93..12d89c9d8fc 100644 --- a/internal/api/resolver_mutation_scene.go +++ b/internal/api/resolver_mutation_scene.go @@ -56,9 +56,9 @@ func (r *mutationResolver) SceneCreate(ctx context.Context, input SceneCreateInp return nil, fmt.Errorf("converting file ids: %w", err) } - fileIDs := make([]file.ID, len(fileIDsInt)) + fileIDs := make([]models.FileID, len(fileIDsInt)) for i, v := range fileIDsInt { - fileIDs[i] = file.ID(v) + fileIDs[i] = models.FileID(v) } 
// Populate a new scene from the input @@ -212,7 +212,7 @@ func scenePartialFromInput(input models.SceneUpdateInput, translator changesetTr return nil, fmt.Errorf("converting primary file id: %w", err) } - converted := file.ID(primaryFileID) + converted := models.FileID(primaryFileID) updatedScene.PrimaryFileID = &converted } @@ -300,7 +300,7 @@ func (r *mutationResolver) sceneUpdate(ctx context.Context, input models.SceneUp } // ensure that new primary file is associated with scene - var f *file.VideoFile + var f *models.VideoFile for _, ff := range originalScene.Files.List() { if ff.ID == newPrimaryFileID { f = ff @@ -575,7 +575,7 @@ func (r *mutationResolver) SceneAssignFile(ctx context.Context, input AssignScen return false, fmt.Errorf("converting file ID: %w", err) } - fileID := file.ID(fileIDInt) + fileID := models.FileID(fileIDInt) if err := r.withTxn(ctx, func(ctx context.Context) error { return r.Resolver.sceneService.AssignFile(ctx, sceneID, fileID) diff --git a/internal/api/resolver_query_find_performer.go b/internal/api/resolver_query_find_performer.go index 437ac8fcf04..a47b7a18dc5 100644 --- a/internal/api/resolver_query_find_performer.go +++ b/internal/api/resolver_query_find_performer.go @@ -23,9 +23,19 @@ func (r *queryResolver) FindPerformer(ctx context.Context, id string) (ret *mode return ret, nil } -func (r *queryResolver) FindPerformers(ctx context.Context, performerFilter *models.PerformerFilterType, filter *models.FindFilterType) (ret *FindPerformersResultType, err error) { +func (r *queryResolver) FindPerformers(ctx context.Context, performerFilter *models.PerformerFilterType, filter *models.FindFilterType, performerIDs []int) (ret *FindPerformersResultType, err error) { if err := r.withReadTxn(ctx, func(ctx context.Context) error { - performers, total, err := r.repository.Performer.Query(ctx, performerFilter, filter) + var performers []*models.Performer + var err error + var total int + + if len(performerIDs) > 0 { + performers, err = 
r.repository.Performer.FindMany(ctx, performerIDs) + total = len(performers) + } else { + performers, total, err = r.repository.Performer.Query(ctx, performerFilter, filter) + } + if err != nil { return err } @@ -34,6 +44,7 @@ func (r *queryResolver) FindPerformers(ctx context.Context, performerFilter *mod Count: total, Performers: performers, } + return nil }); err != nil { return nil, err diff --git a/internal/api/resolver_query_scraper.go b/internal/api/resolver_query_scraper.go index 7b7694341ba..0220316b2fb 100644 --- a/internal/api/resolver_query_scraper.go +++ b/internal/api/resolver_query_scraper.go @@ -129,7 +129,9 @@ func (r *queryResolver) ScrapeScene(ctx context.Context, scraperID string, scene return nil, err } - filterSceneTags([]*scraper.ScrapedScene{ret}) + if ret != nil { + filterSceneTags([]*scraper.ScrapedScene{ret}) + } return ret, nil } @@ -190,7 +192,9 @@ func (r *queryResolver) ScrapeSceneURL(ctx context.Context, url string) (*scrape return nil, err } - filterSceneTags([]*scraper.ScrapedScene{ret}) + if ret != nil { + filterSceneTags([]*scraper.ScrapedScene{ret}) + } return ret, nil } diff --git a/internal/api/routes_image.go b/internal/api/routes_image.go index 4ea612d3b73..4cc2576718c 100644 --- a/internal/api/routes_image.go +++ b/internal/api/routes_image.go @@ -22,14 +22,14 @@ import ( ) type ImageFinder interface { - Find(ctx context.Context, id int) (*models.Image, error) + models.ImageGetter FindByChecksum(ctx context.Context, checksum string) ([]*models.Image, error) } type imageRoutes struct { txnManager txn.Manager imageFinder ImageFinder - fileFinder file.Finder + fileGetter models.FileGetter } func (rs imageRoutes) Routes() chi.Router { @@ -168,7 +168,7 @@ func (rs imageRoutes) ImageCtx(next http.Handler) http.Handler { } if image != nil { - if err := image.LoadPrimaryFile(ctx, rs.fileFinder); err != nil { + if err := image.LoadPrimaryFile(ctx, rs.fileGetter); err != nil { if !errors.Is(err, context.Canceled) { 
logger.Errorf("error loading primary file for image %d: %v", imageID, err) } diff --git a/internal/api/routes_movie.go b/internal/api/routes_movie.go index a64aae76cf8..400587763b5 100644 --- a/internal/api/routes_movie.go +++ b/internal/api/routes_movie.go @@ -14,9 +14,9 @@ import ( ) type MovieFinder interface { + models.MovieGetter GetFrontImage(ctx context.Context, movieID int) ([]byte, error) GetBackImage(ctx context.Context, movieID int) ([]byte, error) - Find(ctx context.Context, id int) (*models.Movie, error) } type movieRoutes struct { diff --git a/internal/api/routes_performer.go b/internal/api/routes_performer.go index e7631de5b84..d05e5309570 100644 --- a/internal/api/routes_performer.go +++ b/internal/api/routes_performer.go @@ -15,7 +15,7 @@ import ( ) type PerformerFinder interface { - Find(ctx context.Context, id int) (*models.Performer, error) + models.PerformerGetter GetImage(ctx context.Context, performerID int) ([]byte, error) } diff --git a/internal/api/routes_scene.go b/internal/api/routes_scene.go index 43d37da36e0..e0584d6888b 100644 --- a/internal/api/routes_scene.go +++ b/internal/api/routes_scene.go @@ -12,40 +12,43 @@ import ( "github.com/stashapp/stash/internal/manager" "github.com/stashapp/stash/internal/manager/config" "github.com/stashapp/stash/pkg/ffmpeg" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/file/video" "github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/scene" "github.com/stashapp/stash/pkg/txn" "github.com/stashapp/stash/pkg/utils" ) type SceneFinder interface { - manager.SceneCoverGetter + models.SceneGetter - scene.IDFinder FindByChecksum(ctx context.Context, checksum string) ([]*models.Scene, error) FindByOSHash(ctx context.Context, oshash string) ([]*models.Scene, error) + GetCover(ctx context.Context, sceneID int) ([]byte, error) } type SceneMarkerFinder interface { - Find(ctx context.Context, id 
int) (*models.SceneMarker, error) + models.SceneMarkerGetter FindBySceneID(ctx context.Context, sceneID int) ([]*models.SceneMarker, error) } +type SceneMarkerTagFinder interface { + models.TagGetter + FindBySceneMarkerID(ctx context.Context, sceneMarkerID int) ([]*models.Tag, error) +} + type CaptionFinder interface { - GetCaptions(ctx context.Context, fileID file.ID) ([]*models.VideoCaption, error) + GetCaptions(ctx context.Context, fileID models.FileID) ([]*models.VideoCaption, error) } type sceneRoutes struct { txnManager txn.Manager sceneFinder SceneFinder - fileFinder file.Finder + fileGetter models.FileGetter captionFinder CaptionFinder sceneMarkerFinder SceneMarkerFinder - tagFinder scene.MarkerTagFinder + tagFinder SceneMarkerTagFinder } func (rs sceneRoutes) Routes() chi.Router { @@ -574,7 +577,7 @@ func (rs sceneRoutes) SceneCtx(next http.Handler) http.Handler { scene, _ = qb.Find(ctx, sceneID) if scene != nil { - if err := scene.LoadPrimaryFile(ctx, rs.fileFinder); err != nil { + if err := scene.LoadPrimaryFile(ctx, rs.fileGetter); err != nil { if !errors.Is(err, context.Canceled) { logger.Errorf("error loading primary file for scene %d: %v", sceneID, err) } diff --git a/internal/api/routes_studio.go b/internal/api/routes_studio.go index ca4e580f6a7..1cce3938532 100644 --- a/internal/api/routes_studio.go +++ b/internal/api/routes_studio.go @@ -11,13 +11,12 @@ import ( "github.com/stashapp/stash/internal/static" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/studio" "github.com/stashapp/stash/pkg/txn" "github.com/stashapp/stash/pkg/utils" ) type StudioFinder interface { - studio.Finder + models.StudioGetter GetImage(ctx context.Context, studioID int) ([]byte, error) } diff --git a/internal/api/routes_tag.go b/internal/api/routes_tag.go index d8837da80c9..9ccf11a11c9 100644 --- a/internal/api/routes_tag.go +++ b/internal/api/routes_tag.go @@ -11,13 +11,12 @@ import ( 
"github.com/stashapp/stash/internal/static" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/tag" "github.com/stashapp/stash/pkg/txn" "github.com/stashapp/stash/pkg/utils" ) type TagFinder interface { - tag.Finder + models.TagGetter GetImage(ctx context.Context, tagID int) ([]byte, error) } diff --git a/internal/api/server.go b/internal/api/server.go index 6eec5b524e1..b909914cdfd 100644 --- a/internal/api/server.go +++ b/internal/api/server.go @@ -151,7 +151,7 @@ func Start() error { r.Mount("/scene", sceneRoutes{ txnManager: txnManager, sceneFinder: txnManager.Scene, - fileFinder: txnManager.File, + fileGetter: txnManager.File, captionFinder: txnManager.File, sceneMarkerFinder: txnManager.SceneMarker, tagFinder: txnManager.Tag, @@ -159,7 +159,7 @@ func Start() error { r.Mount("/image", imageRoutes{ txnManager: txnManager, imageFinder: txnManager.Image, - fileFinder: txnManager.File, + fileGetter: txnManager.File, }.Routes()) r.Mount("/studio", studioRoutes{ txnManager: txnManager, diff --git a/internal/api/types.go b/internal/api/types.go index 13d86f975c7..79b4aa02002 100644 --- a/internal/api/types.go +++ b/internal/api/types.go @@ -18,14 +18,6 @@ func handleFloat64(v float64) *float64 { return &v } -func handleFloat64Value(v float64) float64 { - if math.IsInf(v, 0) || math.IsNaN(v) { - return 0 - } - - return v -} - func translateUpdateIDs(strIDs []string, mode models.RelationshipUpdateMode) (*models.UpdateIDs, error) { ids, err := stringslice.StringSliceToIntSlice(strIDs) if err != nil { diff --git a/internal/autotag/gallery.go b/internal/autotag/gallery.go index d2a8c2c5d29..f768a31dd11 100644 --- a/internal/autotag/gallery.go +++ b/internal/autotag/gallery.go @@ -9,14 +9,19 @@ import ( "github.com/stashapp/stash/pkg/sliceutil/intslice" ) +type GalleryFinderUpdater interface { + models.GalleryQueryer + models.GalleryUpdater +} + type GalleryPerformerUpdater interface { models.PerformerIDLoader - 
gallery.PartialUpdater + models.GalleryUpdater } type GalleryTagUpdater interface { models.TagIDLoader - gallery.PartialUpdater + models.GalleryUpdater } func getGalleryFileTagger(s *models.Gallery, cache *match.Cache) tagger { @@ -39,7 +44,7 @@ func getGalleryFileTagger(s *models.Gallery, cache *match.Cache) tagger { } // GalleryPerformers tags the provided gallery with performers whose name matches the gallery's path. -func GalleryPerformers(ctx context.Context, s *models.Gallery, rw GalleryPerformerUpdater, performerReader match.PerformerAutoTagQueryer, cache *match.Cache) error { +func GalleryPerformers(ctx context.Context, s *models.Gallery, rw GalleryPerformerUpdater, performerReader models.PerformerAutoTagQueryer, cache *match.Cache) error { t := getGalleryFileTagger(s, cache) return t.tagPerformers(ctx, performerReader, func(subjectID, otherID int) (bool, error) { @@ -63,7 +68,7 @@ func GalleryPerformers(ctx context.Context, s *models.Gallery, rw GalleryPerform // GalleryStudios tags the provided gallery with the first studio whose name matches the gallery's path. // // Gallerys will not be tagged if studio is already set. -func GalleryStudios(ctx context.Context, s *models.Gallery, rw GalleryFinderUpdater, studioReader match.StudioAutoTagQueryer, cache *match.Cache) error { +func GalleryStudios(ctx context.Context, s *models.Gallery, rw GalleryFinderUpdater, studioReader models.StudioAutoTagQueryer, cache *match.Cache) error { if s.StudioID != nil { // don't modify return nil @@ -77,7 +82,7 @@ func GalleryStudios(ctx context.Context, s *models.Gallery, rw GalleryFinderUpda } // GalleryTags tags the provided gallery with tags whose name matches the gallery's path. 
-func GalleryTags(ctx context.Context, s *models.Gallery, rw GalleryTagUpdater, tagReader match.TagAutoTagQueryer, cache *match.Cache) error { +func GalleryTags(ctx context.Context, s *models.Gallery, rw GalleryTagUpdater, tagReader models.TagAutoTagQueryer, cache *match.Cache) error { t := getGalleryFileTagger(s, cache) return t.tagTags(ctx, tagReader, func(subjectID, otherID int) (bool, error) { diff --git a/internal/autotag/image.go b/internal/autotag/image.go index 404640786d4..d28960f3caf 100644 --- a/internal/autotag/image.go +++ b/internal/autotag/image.go @@ -9,14 +9,19 @@ import ( "github.com/stashapp/stash/pkg/sliceutil/intslice" ) +type ImageFinderUpdater interface { + models.ImageQueryer + models.ImageUpdater +} + type ImagePerformerUpdater interface { models.PerformerIDLoader - image.PartialUpdater + models.ImageUpdater } type ImageTagUpdater interface { models.TagIDLoader - image.PartialUpdater + models.ImageUpdater } func getImageFileTagger(s *models.Image, cache *match.Cache) tagger { @@ -30,7 +35,7 @@ func getImageFileTagger(s *models.Image, cache *match.Cache) tagger { } // ImagePerformers tags the provided image with performers whose name matches the image's path. -func ImagePerformers(ctx context.Context, s *models.Image, rw ImagePerformerUpdater, performerReader match.PerformerAutoTagQueryer, cache *match.Cache) error { +func ImagePerformers(ctx context.Context, s *models.Image, rw ImagePerformerUpdater, performerReader models.PerformerAutoTagQueryer, cache *match.Cache) error { t := getImageFileTagger(s, cache) return t.tagPerformers(ctx, performerReader, func(subjectID, otherID int) (bool, error) { @@ -54,7 +59,7 @@ func ImagePerformers(ctx context.Context, s *models.Image, rw ImagePerformerUpda // ImageStudios tags the provided image with the first studio whose name matches the image's path. // // Images will not be tagged if studio is already set. 
-func ImageStudios(ctx context.Context, s *models.Image, rw ImageFinderUpdater, studioReader match.StudioAutoTagQueryer, cache *match.Cache) error { +func ImageStudios(ctx context.Context, s *models.Image, rw ImageFinderUpdater, studioReader models.StudioAutoTagQueryer, cache *match.Cache) error { if s.StudioID != nil { // don't modify return nil @@ -68,7 +73,7 @@ func ImageStudios(ctx context.Context, s *models.Image, rw ImageFinderUpdater, s } // ImageTags tags the provided image with tags whose name matches the image's path. -func ImageTags(ctx context.Context, s *models.Image, rw ImageTagUpdater, tagReader match.TagAutoTagQueryer, cache *match.Cache) error { +func ImageTags(ctx context.Context, s *models.Image, rw ImageTagUpdater, tagReader models.TagAutoTagQueryer, cache *match.Cache) error { t := getImageFileTagger(s, cache) return t.tagTags(ctx, tagReader, func(subjectID, otherID int) (bool, error) { diff --git a/internal/autotag/integration_test.go b/internal/autotag/integration_test.go index 1c7b0ee2d55..774a7738bab 100644 --- a/internal/autotag/integration_test.go +++ b/internal/autotag/integration_test.go @@ -10,7 +10,6 @@ import ( "path/filepath" "testing" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/sqlite" "github.com/stashapp/stash/pkg/txn" @@ -99,7 +98,7 @@ func createPerformer(ctx context.Context, pqb models.PerformerWriter) error { func createStudio(ctx context.Context, qb models.StudioWriter, name string) (*models.Studio, error) { // create the studio studio := models.Studio{ - Name: name, + Name: name, } err := qb.Create(ctx, &studio) @@ -124,12 +123,12 @@ func createTag(ctx context.Context, qb models.TagWriter) error { return nil } -func createScenes(ctx context.Context, sqb models.SceneReaderWriter, folderStore file.FolderStore, fileStore file.Store) error { +func createScenes(ctx context.Context, sqb models.SceneReaderWriter, folderStore models.FolderFinderCreator, fileCreator 
models.FileCreator) error { // create the scenes scenePatterns, falseScenePatterns := generateTestPaths(testName, sceneExt) for _, fn := range scenePatterns { - f, err := createSceneFile(ctx, fn, folderStore, fileStore) + f, err := createSceneFile(ctx, fn, folderStore, fileCreator) if err != nil { return err } @@ -141,7 +140,7 @@ func createScenes(ctx context.Context, sqb models.SceneReaderWriter, folderStore } for _, fn := range falseScenePatterns { - f, err := createSceneFile(ctx, fn, folderStore, fileStore) + f, err := createSceneFile(ctx, fn, folderStore, fileCreator) if err != nil { return err } @@ -154,7 +153,7 @@ func createScenes(ctx context.Context, sqb models.SceneReaderWriter, folderStore // add organized scenes for _, fn := range scenePatterns { - f, err := createSceneFile(ctx, "organized"+fn, folderStore, fileStore) + f, err := createSceneFile(ctx, "organized"+fn, folderStore, fileCreator) if err != nil { return err } @@ -168,7 +167,7 @@ func createScenes(ctx context.Context, sqb models.SceneReaderWriter, folderStore } // create scene with existing studio io - f, err := createSceneFile(ctx, existingStudioSceneName, folderStore, fileStore) + f, err := createSceneFile(ctx, existingStudioSceneName, folderStore, fileCreator) if err != nil { return err } @@ -196,7 +195,7 @@ func makeScene(expectedResult bool) *models.Scene { return s } -func createSceneFile(ctx context.Context, name string, folderStore file.FolderStore, fileStore file.Store) (*file.VideoFile, error) { +func createSceneFile(ctx context.Context, name string, folderStore models.FolderFinderCreator, fileCreator models.FileCreator) (*models.VideoFile, error) { folderPath := filepath.Dir(name) basename := filepath.Base(name) @@ -207,21 +206,21 @@ func createSceneFile(ctx context.Context, name string, folderStore file.FolderSt folderID := folder.ID - f := &file.VideoFile{ - BaseFile: &file.BaseFile{ + f := &models.VideoFile{ + BaseFile: &models.BaseFile{ Basename: basename, ParentFolderID: 
folderID, }, } - if err := fileStore.Create(ctx, f); err != nil { + if err := fileCreator.Create(ctx, f); err != nil { return nil, fmt.Errorf("creating scene file %q: %w", name, err) } return f, nil } -func getOrCreateFolder(ctx context.Context, folderStore file.FolderStore, folderPath string) (*file.Folder, error) { +func getOrCreateFolder(ctx context.Context, folderStore models.FolderFinderCreator, folderPath string) (*models.Folder, error) { f, err := folderStore.FindByPath(ctx, folderPath) if err != nil { return nil, fmt.Errorf("getting folder by path: %w", err) @@ -231,7 +230,7 @@ func getOrCreateFolder(ctx context.Context, folderStore file.FolderStore, folder return f, nil } - var parentID file.FolderID + var parentID models.FolderID dir := filepath.Dir(folderPath) if dir != "." { parent, err := getOrCreateFolder(ctx, folderStore, dir) @@ -242,7 +241,7 @@ func getOrCreateFolder(ctx context.Context, folderStore file.FolderStore, folder parentID = parent.ID } - f = &file.Folder{ + f = &models.Folder{ Path: folderPath, } @@ -257,8 +256,8 @@ func getOrCreateFolder(ctx context.Context, folderStore file.FolderStore, folder return f, nil } -func createScene(ctx context.Context, sqb models.SceneWriter, s *models.Scene, f *file.VideoFile) error { - err := sqb.Create(ctx, s, []file.ID{f.ID}) +func createScene(ctx context.Context, sqb models.SceneWriter, s *models.Scene, f *models.VideoFile) error { + err := sqb.Create(ctx, s, []models.FileID{f.ID}) if err != nil { return fmt.Errorf("Failed to create scene with path '%s': %s", f.Path, err.Error()) @@ -267,12 +266,12 @@ func createScene(ctx context.Context, sqb models.SceneWriter, s *models.Scene, f return nil } -func createImages(ctx context.Context, w models.ImageReaderWriter, folderStore file.FolderStore, fileStore file.Store) error { +func createImages(ctx context.Context, w models.ImageReaderWriter, folderStore models.FolderFinderCreator, fileCreator models.FileCreator) error { // create the images imagePatterns, 
falseImagePatterns := generateTestPaths(testName, imageExt) for _, fn := range imagePatterns { - f, err := createImageFile(ctx, fn, folderStore, fileStore) + f, err := createImageFile(ctx, fn, folderStore, fileCreator) if err != nil { return err } @@ -283,7 +282,7 @@ func createImages(ctx context.Context, w models.ImageReaderWriter, folderStore f } } for _, fn := range falseImagePatterns { - f, err := createImageFile(ctx, fn, folderStore, fileStore) + f, err := createImageFile(ctx, fn, folderStore, fileCreator) if err != nil { return err } @@ -296,7 +295,7 @@ func createImages(ctx context.Context, w models.ImageReaderWriter, folderStore f // add organized images for _, fn := range imagePatterns { - f, err := createImageFile(ctx, "organized"+fn, folderStore, fileStore) + f, err := createImageFile(ctx, "organized"+fn, folderStore, fileCreator) if err != nil { return err } @@ -310,7 +309,7 @@ func createImages(ctx context.Context, w models.ImageReaderWriter, folderStore f } // create image with existing studio io - f, err := createImageFile(ctx, existingStudioImageName, folderStore, fileStore) + f, err := createImageFile(ctx, existingStudioImageName, folderStore, fileCreator) if err != nil { return err } @@ -326,7 +325,7 @@ func createImages(ctx context.Context, w models.ImageReaderWriter, folderStore f return nil } -func createImageFile(ctx context.Context, name string, folderStore file.FolderStore, fileStore file.Store) (*file.ImageFile, error) { +func createImageFile(ctx context.Context, name string, folderStore models.FolderFinderCreator, fileCreator models.FileCreator) (*models.ImageFile, error) { folderPath := filepath.Dir(name) basename := filepath.Base(name) @@ -337,14 +336,14 @@ func createImageFile(ctx context.Context, name string, folderStore file.FolderSt folderID := folder.ID - f := &file.ImageFile{ - BaseFile: &file.BaseFile{ + f := &models.ImageFile{ + BaseFile: &models.BaseFile{ Basename: basename, ParentFolderID: folderID, }, } - if err := 
fileStore.Create(ctx, f); err != nil { + if err := fileCreator.Create(ctx, f); err != nil { return nil, err } @@ -362,10 +361,10 @@ func makeImage(expectedResult bool) *models.Image { return o } -func createImage(ctx context.Context, w models.ImageWriter, o *models.Image, f *file.ImageFile) error { +func createImage(ctx context.Context, w models.ImageWriter, o *models.Image, f *models.ImageFile) error { err := w.Create(ctx, &models.ImageCreateInput{ Image: o, - FileIDs: []file.ID{f.ID}, + FileIDs: []models.FileID{f.ID}, }) if err != nil { @@ -375,12 +374,12 @@ func createImage(ctx context.Context, w models.ImageWriter, o *models.Image, f * return nil } -func createGalleries(ctx context.Context, w models.GalleryReaderWriter, folderStore file.FolderStore, fileStore file.Store) error { +func createGalleries(ctx context.Context, w models.GalleryReaderWriter, folderStore models.FolderFinderCreator, fileCreator models.FileCreator) error { // create the galleries galleryPatterns, falseGalleryPatterns := generateTestPaths(testName, galleryExt) for _, fn := range galleryPatterns { - f, err := createGalleryFile(ctx, fn, folderStore, fileStore) + f, err := createGalleryFile(ctx, fn, folderStore, fileCreator) if err != nil { return err } @@ -391,7 +390,7 @@ func createGalleries(ctx context.Context, w models.GalleryReaderWriter, folderSt } } for _, fn := range falseGalleryPatterns { - f, err := createGalleryFile(ctx, fn, folderStore, fileStore) + f, err := createGalleryFile(ctx, fn, folderStore, fileCreator) if err != nil { return err } @@ -404,7 +403,7 @@ func createGalleries(ctx context.Context, w models.GalleryReaderWriter, folderSt // add organized galleries for _, fn := range galleryPatterns { - f, err := createGalleryFile(ctx, "organized"+fn, folderStore, fileStore) + f, err := createGalleryFile(ctx, "organized"+fn, folderStore, fileCreator) if err != nil { return err } @@ -418,7 +417,7 @@ func createGalleries(ctx context.Context, w models.GalleryReaderWriter, folderSt } 
// create gallery with existing studio io - f, err := createGalleryFile(ctx, existingStudioGalleryName, folderStore, fileStore) + f, err := createGalleryFile(ctx, existingStudioGalleryName, folderStore, fileCreator) if err != nil { return err } @@ -434,7 +433,7 @@ func createGalleries(ctx context.Context, w models.GalleryReaderWriter, folderSt return nil } -func createGalleryFile(ctx context.Context, name string, folderStore file.FolderStore, fileStore file.Store) (*file.BaseFile, error) { +func createGalleryFile(ctx context.Context, name string, folderStore models.FolderFinderCreator, fileCreator models.FileCreator) (*models.BaseFile, error) { folderPath := filepath.Dir(name) basename := filepath.Base(name) @@ -445,12 +444,12 @@ func createGalleryFile(ctx context.Context, name string, folderStore file.Folder folderID := folder.ID - f := &file.BaseFile{ + f := &models.BaseFile{ Basename: basename, ParentFolderID: folderID, } - if err := fileStore.Create(ctx, f); err != nil { + if err := fileCreator.Create(ctx, f); err != nil { return nil, err } @@ -468,8 +467,8 @@ func makeGallery(expectedResult bool) *models.Gallery { return o } -func createGallery(ctx context.Context, w models.GalleryWriter, o *models.Gallery, f *file.BaseFile) error { - err := w.Create(ctx, o, []file.ID{f.ID}) +func createGallery(ctx context.Context, w models.GalleryWriter, o *models.Gallery, f *models.BaseFile) error { + err := w.Create(ctx, o, []models.FileID{f.ID}) if err != nil { return fmt.Errorf("Failed to create gallery with path '%s': %s", f.Path, err.Error()) } diff --git a/internal/autotag/performer.go b/internal/autotag/performer.go index 32364dc5099..cc839f361c6 100644 --- a/internal/autotag/performer.go +++ b/internal/autotag/performer.go @@ -13,21 +13,21 @@ import ( ) type SceneQueryPerformerUpdater interface { - scene.Queryer + models.SceneQueryer models.PerformerIDLoader - scene.PartialUpdater + models.SceneUpdater } type ImageQueryPerformerUpdater interface { - image.Queryer + 
models.ImageQueryer models.PerformerIDLoader - image.PartialUpdater + models.ImageUpdater } type GalleryQueryPerformerUpdater interface { - gallery.Queryer + models.GalleryQueryer models.PerformerIDLoader - gallery.PartialUpdater + models.GalleryUpdater } func getPerformerTaggers(p *models.Performer, cache *match.Cache) []tagger { diff --git a/internal/autotag/scene.go b/internal/autotag/scene.go index 285ff7d7dde..6095905e812 100644 --- a/internal/autotag/scene.go +++ b/internal/autotag/scene.go @@ -9,14 +9,19 @@ import ( "github.com/stashapp/stash/pkg/sliceutil/intslice" ) +type SceneFinderUpdater interface { + models.SceneQueryer + models.SceneUpdater +} + type ScenePerformerUpdater interface { models.PerformerIDLoader - scene.PartialUpdater + models.SceneUpdater } type SceneTagUpdater interface { models.TagIDLoader - scene.PartialUpdater + models.SceneUpdater } func getSceneFileTagger(s *models.Scene, cache *match.Cache) tagger { @@ -30,7 +35,7 @@ func getSceneFileTagger(s *models.Scene, cache *match.Cache) tagger { } // ScenePerformers tags the provided scene with performers whose name matches the scene's path. -func ScenePerformers(ctx context.Context, s *models.Scene, rw ScenePerformerUpdater, performerReader match.PerformerAutoTagQueryer, cache *match.Cache) error { +func ScenePerformers(ctx context.Context, s *models.Scene, rw ScenePerformerUpdater, performerReader models.PerformerAutoTagQueryer, cache *match.Cache) error { t := getSceneFileTagger(s, cache) return t.tagPerformers(ctx, performerReader, func(subjectID, otherID int) (bool, error) { @@ -54,7 +59,7 @@ func ScenePerformers(ctx context.Context, s *models.Scene, rw ScenePerformerUpda // SceneStudios tags the provided scene with the first studio whose name matches the scene's path. // // Scenes will not be tagged if studio is already set. 
-func SceneStudios(ctx context.Context, s *models.Scene, rw SceneFinderUpdater, studioReader match.StudioAutoTagQueryer, cache *match.Cache) error { +func SceneStudios(ctx context.Context, s *models.Scene, rw SceneFinderUpdater, studioReader models.StudioAutoTagQueryer, cache *match.Cache) error { if s.StudioID != nil { // don't modify return nil @@ -68,7 +73,7 @@ func SceneStudios(ctx context.Context, s *models.Scene, rw SceneFinderUpdater, s } // SceneTags tags the provided scene with tags whose name matches the scene's path. -func SceneTags(ctx context.Context, s *models.Scene, rw SceneTagUpdater, tagReader match.TagAutoTagQueryer, cache *match.Cache) error { +func SceneTags(ctx context.Context, s *models.Scene, rw SceneTagUpdater, tagReader models.TagAutoTagQueryer, cache *match.Cache) error { t := getSceneFileTagger(s, cache) return t.tagTags(ctx, tagReader, func(subjectID, otherID int) (bool, error) { diff --git a/internal/autotag/studio.go b/internal/autotag/studio.go index bfa6c941e64..ef5a6f0da9c 100644 --- a/internal/autotag/studio.go +++ b/internal/autotag/studio.go @@ -3,18 +3,15 @@ package autotag import ( "context" - "github.com/stashapp/stash/pkg/gallery" - "github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/match" "github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/scene" "github.com/stashapp/stash/pkg/txn" ) // the following functions aren't used in Tagger because they assume // use within a transaction -func addSceneStudio(ctx context.Context, sceneWriter scene.PartialUpdater, o *models.Scene, studioID int) (bool, error) { +func addSceneStudio(ctx context.Context, sceneWriter models.SceneUpdater, o *models.Scene, studioID int) (bool, error) { // don't set if already set if o.StudioID != nil { return false, nil @@ -31,7 +28,7 @@ func addSceneStudio(ctx context.Context, sceneWriter scene.PartialUpdater, o *mo return true, nil } -func addImageStudio(ctx context.Context, imageWriter image.PartialUpdater, i 
*models.Image, studioID int) (bool, error) { +func addImageStudio(ctx context.Context, imageWriter models.ImageUpdater, i *models.Image, studioID int) (bool, error) { // don't set if already set if i.StudioID != nil { return false, nil @@ -84,11 +81,6 @@ func getStudioTagger(p *models.Studio, aliases []string, cache *match.Cache) []t return ret } -type SceneFinderUpdater interface { - scene.Queryer - scene.PartialUpdater -} - // StudioScenes searches for scenes whose path matches the provided studio name and tags the scene with the studio, if studio is not already set on the scene. func (tagger *Tagger) StudioScenes(ctx context.Context, p *models.Studio, paths []string, aliases []string, rw SceneFinderUpdater) error { t := getStudioTagger(p, aliases, tagger.Cache) @@ -120,12 +112,6 @@ func (tagger *Tagger) StudioScenes(ctx context.Context, p *models.Studio, paths return nil } -type ImageFinderUpdater interface { - image.Queryer - Find(ctx context.Context, id int) (*models.Image, error) - UpdatePartial(ctx context.Context, id int, partial models.ImagePartial) (*models.Image, error) -} - // StudioImages searches for images whose path matches the provided studio name and tags the image with the studio, if studio is not already set on the image. func (tagger *Tagger) StudioImages(ctx context.Context, p *models.Studio, paths []string, aliases []string, rw ImageFinderUpdater) error { t := getStudioTagger(p, aliases, tagger.Cache) @@ -157,12 +143,6 @@ func (tagger *Tagger) StudioImages(ctx context.Context, p *models.Studio, paths return nil } -type GalleryFinderUpdater interface { - gallery.Queryer - gallery.PartialUpdater - Find(ctx context.Context, id int) (*models.Gallery, error) -} - // StudioGalleries searches for galleries whose path matches the provided studio name and tags the gallery with the studio, if studio is not already set on the gallery. 
func (tagger *Tagger) StudioGalleries(ctx context.Context, p *models.Studio, paths []string, aliases []string, rw GalleryFinderUpdater) error { t := getStudioTagger(p, aliases, tagger.Cache) diff --git a/internal/autotag/tag.go b/internal/autotag/tag.go index 94c7c1bb335..8c404f62f28 100644 --- a/internal/autotag/tag.go +++ b/internal/autotag/tag.go @@ -13,21 +13,21 @@ import ( ) type SceneQueryTagUpdater interface { - scene.Queryer + models.SceneQueryer models.TagIDLoader - scene.PartialUpdater + models.SceneUpdater } type ImageQueryTagUpdater interface { - image.Queryer + models.ImageQueryer models.TagIDLoader - image.PartialUpdater + models.ImageUpdater } type GalleryQueryTagUpdater interface { - gallery.Queryer + models.GalleryQueryer models.TagIDLoader - gallery.PartialUpdater + models.GalleryUpdater } func getTagTaggers(p *models.Tag, aliases []string, cache *match.Cache) []tagger { diff --git a/internal/autotag/tagger.go b/internal/autotag/tagger.go index 07cb1da87d3..b814bea608f 100644 --- a/internal/autotag/tagger.go +++ b/internal/autotag/tagger.go @@ -17,12 +17,9 @@ import ( "context" "fmt" - "github.com/stashapp/stash/pkg/gallery" - "github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/match" "github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/scene" "github.com/stashapp/stash/pkg/txn" ) @@ -54,7 +51,7 @@ func (t *tagger) addLog(otherType, otherName string) { logger.Infof("Added %s '%s' to %s '%s'", otherType, otherName, t.Type, t.Name) } -func (t *tagger) tagPerformers(ctx context.Context, performerReader match.PerformerAutoTagQueryer, addFunc addLinkFunc) error { +func (t *tagger) tagPerformers(ctx context.Context, performerReader models.PerformerAutoTagQueryer, addFunc addLinkFunc) error { others, err := match.PathToPerformers(ctx, t.Path, performerReader, t.cache, t.trimExt) if err != nil { return err @@ -75,7 +72,7 @@ func (t *tagger) tagPerformers(ctx context.Context, 
performerReader match.Perfor return nil } -func (t *tagger) tagStudios(ctx context.Context, studioReader match.StudioAutoTagQueryer, addFunc addLinkFunc) error { +func (t *tagger) tagStudios(ctx context.Context, studioReader models.StudioAutoTagQueryer, addFunc addLinkFunc) error { studio, err := match.PathToStudio(ctx, t.Path, studioReader, t.cache, t.trimExt) if err != nil { return err @@ -96,7 +93,7 @@ func (t *tagger) tagStudios(ctx context.Context, studioReader match.StudioAutoTa return nil } -func (t *tagger) tagTags(ctx context.Context, tagReader match.TagAutoTagQueryer, addFunc addLinkFunc) error { +func (t *tagger) tagTags(ctx context.Context, tagReader models.TagAutoTagQueryer, addFunc addLinkFunc) error { others, err := match.PathToTags(ctx, t.Path, tagReader, t.cache, t.trimExt) if err != nil { return err @@ -117,7 +114,7 @@ func (t *tagger) tagTags(ctx context.Context, tagReader match.TagAutoTagQueryer, return nil } -func (t *tagger) tagScenes(ctx context.Context, paths []string, sceneReader scene.Queryer, addFunc addSceneLinkFunc) error { +func (t *tagger) tagScenes(ctx context.Context, paths []string, sceneReader models.SceneQueryer, addFunc addSceneLinkFunc) error { return match.PathToScenesFn(ctx, t.Name, paths, sceneReader, func(ctx context.Context, p *models.Scene) error { added, err := addFunc(p) @@ -133,7 +130,7 @@ func (t *tagger) tagScenes(ctx context.Context, paths []string, sceneReader scen }) } -func (t *tagger) tagImages(ctx context.Context, paths []string, imageReader image.Queryer, addFunc addImageLinkFunc) error { +func (t *tagger) tagImages(ctx context.Context, paths []string, imageReader models.ImageQueryer, addFunc addImageLinkFunc) error { return match.PathToImagesFn(ctx, t.Name, paths, imageReader, func(ctx context.Context, p *models.Image) error { added, err := addFunc(p) @@ -149,7 +146,7 @@ func (t *tagger) tagImages(ctx context.Context, paths []string, imageReader imag }) } -func (t *tagger) tagGalleries(ctx context.Context, 
paths []string, galleryReader gallery.Queryer, addFunc addGalleryLinkFunc) error { +func (t *tagger) tagGalleries(ctx context.Context, paths []string, galleryReader models.GalleryQueryer, addFunc addGalleryLinkFunc) error { return match.PathToGalleriesFn(ctx, t.Name, paths, galleryReader, func(ctx context.Context, p *models.Gallery) error { added, err := addFunc(p) diff --git a/internal/dlna/cds.go b/internal/dlna/cds.go index 826b52acd66..eba98ac489f 100644 --- a/internal/dlna/cds.go +++ b/internal/dlna/cds.go @@ -363,7 +363,7 @@ func (me *contentDirectoryService) handleBrowseMetadata(obj object, host string) if err := txn.WithReadTxn(context.TODO(), me.txnManager, func(ctx context.Context) error { scene, err = me.repository.SceneFinder.Find(ctx, sceneID) if scene != nil { - err = scene.LoadPrimaryFile(ctx, me.repository.FileFinder) + err = scene.LoadPrimaryFile(ctx, me.repository.FileGetter) } if err != nil { @@ -478,7 +478,7 @@ func (me *contentDirectoryService) getVideos(sceneFilter *models.SceneFilterType } } else { for _, s := range scenes { - if err := s.LoadPrimaryFile(ctx, me.repository.FileFinder); err != nil { + if err := s.LoadPrimaryFile(ctx, me.repository.FileGetter); err != nil { return err } @@ -506,7 +506,7 @@ func (me *contentDirectoryService) getPageVideos(sceneFilter *models.SceneFilter sort := me.VideoSortOrder direction := getSortDirection(sceneFilter, sort) var err error - objs, err = pager.getPageVideos(ctx, me.repository.SceneFinder, me.repository.FileFinder, page, host, sort, direction) + objs, err = pager.getPageVideos(ctx, me.repository.SceneFinder, me.repository.FileGetter, page, host, sort, direction) if err != nil { return err } diff --git a/internal/dlna/dms.go b/internal/dlna/dms.go index 502dbe0e44e..fe078aab022 100644 --- a/internal/dlna/dms.go +++ b/internal/dlna/dms.go @@ -48,13 +48,12 @@ import ( "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/scene" 
"github.com/stashapp/stash/pkg/txn" ) type SceneFinder interface { - scene.Queryer - scene.IDFinder + models.SceneGetter + models.SceneQueryer } type StudioFinder interface { diff --git a/internal/dlna/paging.go b/internal/dlna/paging.go index bd1b0028375..fae6ebf1360 100644 --- a/internal/dlna/paging.go +++ b/internal/dlna/paging.go @@ -6,7 +6,6 @@ import ( "math" "strconv" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/scene" ) @@ -20,7 +19,7 @@ func (p *scenePager) getPageID(page int) string { return p.parentID + "/page/" + strconv.Itoa(page) } -func (p *scenePager) getPages(ctx context.Context, r scene.Queryer, total int) ([]interface{}, error) { +func (p *scenePager) getPages(ctx context.Context, r models.SceneQueryer, total int) ([]interface{}, error) { var objs []interface{} // get the first scene of each page to set an appropriate title @@ -60,7 +59,7 @@ func (p *scenePager) getPages(ctx context.Context, r scene.Queryer, total int) ( return objs, nil } -func (p *scenePager) getPageVideos(ctx context.Context, r SceneFinder, f file.Finder, page int, host string, sort string, direction models.SortDirectionEnum) ([]interface{}, error) { +func (p *scenePager) getPageVideos(ctx context.Context, r SceneFinder, f models.FileGetter, page int, host string, sort string, direction models.SortDirectionEnum) ([]interface{}, error) { var objs []interface{} findFilter := &models.FindFilterType{ diff --git a/internal/dlna/service.go b/internal/dlna/service.go index 0d8932e0803..d5399e6a11e 100644 --- a/internal/dlna/service.go +++ b/internal/dlna/service.go @@ -8,7 +8,6 @@ import ( "sync" "time" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/txn" @@ -16,7 +15,7 @@ import ( type Repository struct { SceneFinder SceneFinder - FileFinder file.Finder + FileGetter models.FileGetter StudioFinder StudioFinder TagFinder 
TagFinder PerformerFinder PerformerFinder diff --git a/internal/identify/identify.go b/internal/identify/identify.go index 3a9cea6107e..db8ca2f54ab 100644 --- a/internal/identify/identify.go +++ b/internal/identify/identify.go @@ -46,7 +46,7 @@ type SceneIdentifier struct { SceneReaderUpdater SceneReaderUpdater StudioReaderWriter models.StudioReaderWriter PerformerCreator PerformerCreator - TagCreatorFinder TagCreatorFinder + TagFinderCreator models.TagFinderCreator DefaultOptions *MetadataOptions Sources []ScraperSource @@ -176,7 +176,7 @@ func (t *SceneIdentifier) getSceneUpdater(ctx context.Context, s *models.Scene, sceneReader: t.SceneReaderUpdater, studioReaderWriter: t.StudioReaderWriter, performerCreator: t.PerformerCreator, - tagCreatorFinder: t.TagCreatorFinder, + tagCreator: t.TagFinderCreator, scene: s, result: result, fieldOptions: fieldOptions, @@ -332,7 +332,7 @@ func (t *SceneIdentifier) addTagToScene(ctx context.Context, txnManager txn.Mana return err } - ret, err := t.TagCreatorFinder.Find(ctx, tagID) + ret, err := t.TagFinderCreator.Find(ctx, tagID) if err != nil { logger.Infof("Added tag id %s to skipped scene %s", tagToAdd, s.Path) } else { diff --git a/internal/identify/identify_test.go b/internal/identify/identify_test.go index 30dd72803fb..04ff0360765 100644 --- a/internal/identify/identify_test.go +++ b/internal/identify/identify_test.go @@ -186,7 +186,7 @@ func TestSceneIdentifier_Identify(t *testing.T) { t.Run(tt.name, func(t *testing.T) { identifier := SceneIdentifier{ SceneReaderUpdater: mockSceneReaderWriter, - TagCreatorFinder: mockTagFinderCreator, + TagFinderCreator: mockTagFinderCreator, DefaultOptions: defaultOptions, Sources: sources, SceneUpdatePostHookExecutor: mockHookExecutor{}, diff --git a/internal/identify/performer.go b/internal/identify/performer.go index f544473d2b2..947bb09d6f8 100644 --- a/internal/identify/performer.go +++ b/internal/identify/performer.go @@ -10,7 +10,7 @@ import ( ) type PerformerCreator interface { 
- Create(ctx context.Context, newPerformer *models.Performer) error + models.PerformerCreator UpdateImage(ctx context.Context, performerID int, image []byte) error } diff --git a/internal/identify/scene.go b/internal/identify/scene.go index 160a0a8b646..9a951c13b18 100644 --- a/internal/identify/scene.go +++ b/internal/identify/scene.go @@ -11,32 +11,29 @@ import ( "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/scene" "github.com/stashapp/stash/pkg/sliceutil" "github.com/stashapp/stash/pkg/sliceutil/intslice" - "github.com/stashapp/stash/pkg/tag" "github.com/stashapp/stash/pkg/utils" ) -type SceneReaderUpdater interface { +type SceneCoverGetter interface { GetCover(ctx context.Context, sceneID int) ([]byte, error) - scene.Updater +} + +type SceneReaderUpdater interface { + SceneCoverGetter + models.SceneUpdater models.PerformerIDLoader models.TagIDLoader models.StashIDLoader models.URLLoader } -type TagCreatorFinder interface { - Create(ctx context.Context, newTag *models.Tag) error - tag.Finder -} - type sceneRelationships struct { - sceneReader SceneReaderUpdater + sceneReader SceneCoverGetter studioReaderWriter models.StudioReaderWriter performerCreator PerformerCreator - tagCreatorFinder TagCreatorFinder + tagCreator models.TagCreator scene *models.Scene result *scrapeResult fieldOptions map[string]*FieldOptions @@ -173,7 +170,7 @@ func (g sceneRelationships) tags(ctx context.Context) ([]int, error) { CreatedAt: now, UpdatedAt: now, } - err := g.tagCreatorFinder.Create(ctx, &newTag) + err := g.tagCreator.Create(ctx, &newTag) if err != nil { return nil, fmt.Errorf("error creating tag: %w", err) } diff --git a/internal/identify/scene_test.go b/internal/identify/scene_test.go index ae6963ee379..bb0598b060a 100644 --- a/internal/identify/scene_test.go +++ b/internal/identify/scene_test.go @@ -377,9 +377,9 @@ func Test_sceneRelationships_tags(t *testing.T) { })).Return(errors.New("error creating tag")) 
tr := sceneRelationships{ - sceneReader: mockSceneReaderWriter, - tagCreatorFinder: mockTagReaderWriter, - fieldOptions: make(map[string]*FieldOptions), + sceneReader: mockSceneReaderWriter, + tagCreator: mockTagReaderWriter, + fieldOptions: make(map[string]*FieldOptions), } tests := []struct { diff --git a/internal/manager/fingerprint.go b/internal/manager/fingerprint.go index fc183cc6a1b..b30ac453263 100644 --- a/internal/manager/fingerprint.go +++ b/internal/manager/fingerprint.go @@ -10,13 +10,14 @@ import ( "github.com/stashapp/stash/pkg/hash/md5" "github.com/stashapp/stash/pkg/hash/oshash" "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" ) type fingerprintCalculator struct { Config *config.Instance } -func (c *fingerprintCalculator) calculateOshash(f *file.BaseFile, o file.Opener) (*file.Fingerprint, error) { +func (c *fingerprintCalculator) calculateOshash(f *models.BaseFile, o file.Opener) (*models.Fingerprint, error) { r, err := o.Open() if err != nil { return nil, fmt.Errorf("opening file: %w", err) @@ -34,13 +35,13 @@ func (c *fingerprintCalculator) calculateOshash(f *file.BaseFile, o file.Opener) return nil, fmt.Errorf("calculating oshash: %w", err) } - return &file.Fingerprint{ - Type: file.FingerprintTypeOshash, + return &models.Fingerprint{ + Type: models.FingerprintTypeOshash, Fingerprint: hash, }, nil } -func (c *fingerprintCalculator) calculateMD5(o file.Opener) (*file.Fingerprint, error) { +func (c *fingerprintCalculator) calculateMD5(o file.Opener) (*models.Fingerprint, error) { r, err := o.Open() if err != nil { return nil, fmt.Errorf("opening file: %w", err) @@ -53,24 +54,24 @@ func (c *fingerprintCalculator) calculateMD5(o file.Opener) (*file.Fingerprint, return nil, fmt.Errorf("calculating md5: %w", err) } - return &file.Fingerprint{ - Type: file.FingerprintTypeMD5, + return &models.Fingerprint{ + Type: models.FingerprintTypeMD5, Fingerprint: hash, }, nil } -func (c *fingerprintCalculator) 
CalculateFingerprints(f *file.BaseFile, o file.Opener, useExisting bool) ([]file.Fingerprint, error) { - var ret []file.Fingerprint +func (c *fingerprintCalculator) CalculateFingerprints(f *models.BaseFile, o file.Opener, useExisting bool) ([]models.Fingerprint, error) { + var ret []models.Fingerprint calculateMD5 := true if useAsVideo(f.Path) { var ( - fp *file.Fingerprint + fp *models.Fingerprint err error ) if useExisting { - fp = f.Fingerprints.For(file.FingerprintTypeOshash) + fp = f.Fingerprints.For(models.FingerprintTypeOshash) } if fp == nil { @@ -89,12 +90,12 @@ func (c *fingerprintCalculator) CalculateFingerprints(f *file.BaseFile, o file.O if calculateMD5 { var ( - fp *file.Fingerprint + fp *models.Fingerprint err error ) if useExisting { - fp = f.Fingerprints.For(file.FingerprintTypeMD5) + fp = f.Fingerprints.For(models.FingerprintTypeMD5) } if fp == nil { diff --git a/internal/manager/manager.go b/internal/manager/manager.go index 0b1c50abe42..e199f9ce78a 100644 --- a/internal/manager/manager.go +++ b/internal/manager/manager.go @@ -26,6 +26,7 @@ import ( "github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/job" "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/paths" "github.com/stashapp/stash/pkg/plugin" "github.com/stashapp/stash/pkg/scene" @@ -222,7 +223,7 @@ func initialize() error { instance.DLNAService = dlna.NewService(instance.Repository, dlna.Repository{ SceneFinder: instance.Repository.Scene, - FileFinder: instance.Repository.File, + FileGetter: instance.Repository.File, StudioFinder: instance.Repository.Studio, TagFinder: instance.Repository.Tag, PerformerFinder: instance.Repository.Performer, @@ -280,15 +281,15 @@ func initialize() error { return nil } -func videoFileFilter(ctx context.Context, f file.File) bool { +func videoFileFilter(ctx context.Context, f models.File) bool { return useAsVideo(f.Base().Path) } -func imageFileFilter(ctx context.Context, f 
file.File) bool { +func imageFileFilter(ctx context.Context, f models.File) bool { return useAsImage(f.Base().Path) } -func galleryFileFilter(ctx context.Context, f file.File) bool { +func galleryFileFilter(ctx context.Context, f models.File) bool { return isZip(f.Base().Basename) } @@ -297,7 +298,7 @@ func makeScanner(db *sqlite.Database, pluginCache *plugin.Cache) *file.Scanner { Repository: file.Repository{ Manager: db, DatabaseProvider: db, - Store: db.File, + FileStore: db.File, FolderStore: db.Folder, }, FileDecorators: []file.Decorator{ @@ -325,7 +326,7 @@ func makeCleaner(db *sqlite.Database, pluginCache *plugin.Cache) *file.Cleaner { Repository: file.Repository{ Manager: db, DatabaseProvider: db, - Store: db.File, + FileStore: db.File, FolderStore: db.Folder, }, Handlers: []file.CleanHandler{ diff --git a/internal/manager/repository.go b/internal/manager/repository.go index f6f8176aa86..77859d06baa 100644 --- a/internal/manager/repository.go +++ b/internal/manager/repository.go @@ -3,8 +3,6 @@ package manager import ( "context" - "github.com/stashapp/stash/pkg/file" - "github.com/stashapp/stash/pkg/gallery" "github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/scene" @@ -12,49 +10,17 @@ import ( "github.com/stashapp/stash/pkg/txn" ) -type ImageReaderWriter interface { - models.ImageReaderWriter - image.FinderCreatorUpdater - GetManyFileIDs(ctx context.Context, ids []int) ([][]file.ID, error) -} - -type GalleryReaderWriter interface { - models.GalleryReaderWriter - gallery.FinderCreatorUpdater - gallery.Finder - models.FileLoader - GetManyFileIDs(ctx context.Context, ids []int) ([][]file.ID, error) -} - -type SceneReaderWriter interface { - models.SceneReaderWriter - scene.CreatorUpdater - models.URLLoader - GetManyFileIDs(ctx context.Context, ids []int) ([][]file.ID, error) -} - -type FileReaderWriter interface { - file.Store - Query(ctx context.Context, options models.FileQueryOptions) 
(*models.FileQueryResult, error) - GetCaptions(ctx context.Context, fileID file.ID) ([]*models.VideoCaption, error) - IsPrimary(ctx context.Context, fileID file.ID) (bool, error) -} - -type FolderReaderWriter interface { - file.FolderStore -} - type Repository struct { models.TxnManager - File FileReaderWriter - Folder FolderReaderWriter - Gallery GalleryReaderWriter + File models.FileReaderWriter + Folder models.FolderReaderWriter + Gallery models.GalleryReaderWriter GalleryChapter models.GalleryChapterReaderWriter - Image ImageReaderWriter + Image models.ImageReaderWriter Movie models.MovieReaderWriter Performer models.PerformerReaderWriter - Scene SceneReaderWriter + Scene models.SceneReaderWriter SceneMarker models.SceneMarkerReaderWriter Studio models.StudioReaderWriter Tag models.TagReaderWriter @@ -94,15 +60,15 @@ func sqliteRepository(d *sqlite.Database) Repository { } type SceneService interface { - Create(ctx context.Context, input *models.Scene, fileIDs []file.ID, coverImage []byte) (*models.Scene, error) - AssignFile(ctx context.Context, sceneID int, fileID file.ID) error + Create(ctx context.Context, input *models.Scene, fileIDs []models.FileID, coverImage []byte) (*models.Scene, error) + AssignFile(ctx context.Context, sceneID int, fileID models.FileID) error Merge(ctx context.Context, sourceIDs []int, destinationID int, values models.ScenePartial) error Destroy(ctx context.Context, scene *models.Scene, fileDeleter *scene.FileDeleter, deleteGenerated, deleteFile bool) error } type ImageService interface { Destroy(ctx context.Context, image *models.Image, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile bool) error - DestroyZipImages(ctx context.Context, zipFile file.File, fileDeleter *image.FileDeleter, deleteGenerated bool) ([]*models.Image, error) + DestroyZipImages(ctx context.Context, zipFile models.File, fileDeleter *image.FileDeleter, deleteGenerated bool) ([]*models.Image, error) } type GalleryService interface { diff --git 
a/internal/manager/scene.go b/internal/manager/scene.go index 39b96fec74f..ff551754ed1 100644 --- a/internal/manager/scene.go +++ b/internal/manager/scene.go @@ -6,7 +6,6 @@ import ( "github.com/stashapp/stash/internal/manager/config" "github.com/stashapp/stash/pkg/ffmpeg" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/models" ) @@ -57,7 +56,7 @@ var ( } ) -func GetVideoFileContainer(file *file.VideoFile) (ffmpeg.Container, error) { +func GetVideoFileContainer(file *models.VideoFile) (ffmpeg.Container, error) { var container ffmpeg.Container format := file.Format if format != "" { @@ -88,7 +87,7 @@ func GetSceneStreamPaths(scene *models.Scene, directStreamURL *url.URL, maxStrea // convert StreamingResolutionEnum to ResolutionEnum maxStreamingResolution := models.ResolutionEnum(maxStreamingTranscodeSize) - sceneResolution := file.GetMinResolution(pf) + sceneResolution := models.GetMinResolution(pf) includeSceneStreamPath := func(streamingResolution models.StreamingResolutionEnum) bool { var minResolution int if streamingResolution == models.StreamingResolutionEnumOriginal { diff --git a/internal/manager/task_clean.go b/internal/manager/task_clean.go index 43cbc92d986..f5c3e1d547b 100644 --- a/internal/manager/task_clean.go +++ b/internal/manager/task_clean.go @@ -257,7 +257,7 @@ type cleanHandler struct { PluginCache *plugin.Cache } -func (h *cleanHandler) HandleFile(ctx context.Context, fileDeleter *file.Deleter, fileID file.ID) error { +func (h *cleanHandler) HandleFile(ctx context.Context, fileDeleter *file.Deleter, fileID models.FileID) error { if err := h.handleRelatedScenes(ctx, fileDeleter, fileID); err != nil { return err } @@ -271,11 +271,11 @@ func (h *cleanHandler) HandleFile(ctx context.Context, fileDeleter *file.Deleter return nil } -func (h *cleanHandler) HandleFolder(ctx context.Context, fileDeleter *file.Deleter, folderID file.FolderID) error { +func (h *cleanHandler) HandleFolder(ctx 
context.Context, fileDeleter *file.Deleter, folderID models.FolderID) error { return h.deleteRelatedFolderGalleries(ctx, folderID) } -func (h *cleanHandler) handleRelatedScenes(ctx context.Context, fileDeleter *file.Deleter, fileID file.ID) error { +func (h *cleanHandler) handleRelatedScenes(ctx context.Context, fileDeleter *file.Deleter, fileID models.FileID) error { mgr := GetInstance() sceneQB := mgr.Database.Scene scenes, err := sceneQB.FindByFileID(ctx, fileID) @@ -313,7 +313,7 @@ func (h *cleanHandler) handleRelatedScenes(ctx context.Context, fileDeleter *fil }, nil) } else { // set the primary file to a remaining file - var newPrimaryID file.ID + var newPrimaryID models.FileID for _, f := range scene.Files.List() { if f.ID != fileID { newPrimaryID = f.ID @@ -332,7 +332,7 @@ func (h *cleanHandler) handleRelatedScenes(ctx context.Context, fileDeleter *fil return nil } -func (h *cleanHandler) handleRelatedGalleries(ctx context.Context, fileID file.ID) error { +func (h *cleanHandler) handleRelatedGalleries(ctx context.Context, fileID models.FileID) error { mgr := GetInstance() qb := mgr.Database.Gallery galleries, err := qb.FindByFileID(ctx, fileID) @@ -358,7 +358,7 @@ func (h *cleanHandler) handleRelatedGalleries(ctx context.Context, fileID file.I }, nil) } else { // set the primary file to a remaining file - var newPrimaryID file.ID + var newPrimaryID models.FileID for _, f := range g.Files.List() { if f.Base().ID != fileID { newPrimaryID = f.Base().ID @@ -377,7 +377,7 @@ func (h *cleanHandler) handleRelatedGalleries(ctx context.Context, fileID file.I return nil } -func (h *cleanHandler) deleteRelatedFolderGalleries(ctx context.Context, folderID file.FolderID) error { +func (h *cleanHandler) deleteRelatedFolderGalleries(ctx context.Context, folderID models.FolderID) error { mgr := GetInstance() qb := mgr.Database.Gallery galleries, err := qb.FindByFolderID(ctx, folderID) @@ -401,7 +401,7 @@ func (h *cleanHandler) deleteRelatedFolderGalleries(ctx 
context.Context, folderI return nil } -func (h *cleanHandler) handleRelatedImages(ctx context.Context, fileDeleter *file.Deleter, fileID file.ID) error { +func (h *cleanHandler) handleRelatedImages(ctx context.Context, fileDeleter *file.Deleter, fileID models.FileID) error { mgr := GetInstance() imageQB := mgr.Database.Image images, err := imageQB.FindByFileID(ctx, fileID) @@ -431,7 +431,7 @@ func (h *cleanHandler) handleRelatedImages(ctx context.Context, fileDeleter *fil }, nil) } else { // set the primary file to a remaining file - var newPrimaryID file.ID + var newPrimaryID models.FileID for _, f := range i.Files.List() { if f.Base().ID != fileID { newPrimaryID = f.Base().ID diff --git a/internal/manager/task_export.go b/internal/manager/task_export.go index f186d3eb48d..98ae1918f31 100644 --- a/internal/manager/task_export.go +++ b/internal/manager/task_export.go @@ -13,7 +13,6 @@ import ( "time" "github.com/stashapp/stash/internal/manager/config" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/gallery" "github.com/stashapp/stash/pkg/image" @@ -386,7 +385,7 @@ func (t *ExportTask) ExportScenes(ctx context.Context, workers int, repo Reposit logger.Infof("[scenes] export complete in %s. 
%d workers used.", time.Since(startTime), workers) } -func exportFile(f file.File, t *ExportTask) { +func exportFile(f models.File, t *ExportTask) { newFileJSON := fileToJSON(f) fn := newFileJSON.Filename() @@ -396,7 +395,7 @@ func exportFile(f file.File, t *ExportTask) { } } -func fileToJSON(f file.File) jsonschema.DirEntry { +func fileToJSON(f models.File) jsonschema.DirEntry { bf := f.Base() base := jsonschema.BaseFile{ @@ -422,7 +421,7 @@ func fileToJSON(f file.File) jsonschema.DirEntry { } switch ff := f.(type) { - case *file.VideoFile: + case *models.VideoFile: base.Type = jsonschema.DirEntryTypeVideo return jsonschema.VideoFile{ BaseFile: &base, @@ -437,7 +436,7 @@ func fileToJSON(f file.File) jsonschema.DirEntry { Interactive: ff.Interactive, InteractiveSpeed: ff.InteractiveSpeed, } - case *file.ImageFile: + case *models.ImageFile: base.Type = jsonschema.DirEntryTypeImage return jsonschema.ImageFile{ BaseFile: &base, @@ -450,7 +449,7 @@ func fileToJSON(f file.File) jsonschema.DirEntry { return &base } -func exportFolder(f file.Folder, t *ExportTask) { +func exportFolder(f models.Folder, t *ExportTask) { newFileJSON := folderToJSON(f) fn := newFileJSON.Filename() @@ -460,7 +459,7 @@ func exportFolder(f file.Folder, t *ExportTask) { } } -func folderToJSON(f file.Folder) jsonschema.DirEntry { +func folderToJSON(f models.Folder) jsonschema.DirEntry { base := jsonschema.BaseDirEntry{ Type: jsonschema.DirEntryTypeFolder, ModTime: json.JSONTime{Time: f.ModTime}, diff --git a/internal/manager/task_generate_clip_preview.go b/internal/manager/task_generate_clip_preview.go index c0ecfeedfdb..e8f98cd17d4 100644 --- a/internal/manager/task_generate_clip_preview.go +++ b/internal/manager/task_generate_clip_preview.go @@ -4,7 +4,6 @@ import ( "context" "fmt" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/logger" @@ -44,7 +43,7 @@ func (t *GenerateClipPreviewTask) Start(ctx 
context.Context) { } func (t *GenerateClipPreviewTask) required() bool { - _, ok := t.Image.Files.Primary().(*file.VideoFile) + _, ok := t.Image.Files.Primary().(*models.VideoFile) if !ok { return false } diff --git a/internal/manager/task_generate_markers.go b/internal/manager/task_generate_markers.go index 5d709874f39..fa5ac902255 100644 --- a/internal/manager/task_generate_markers.go +++ b/internal/manager/task_generate_markers.go @@ -5,7 +5,6 @@ import ( "fmt" "path/filepath" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" @@ -102,7 +101,7 @@ func (t *GenerateMarkersTask) generateSceneMarkers(ctx context.Context) { } } -func (t *GenerateMarkersTask) generateMarker(videoFile *file.VideoFile, scene *models.Scene, sceneMarker *models.SceneMarker) { +func (t *GenerateMarkersTask) generateMarker(videoFile *models.VideoFile, scene *models.Scene, sceneMarker *models.SceneMarker) { sceneHash := t.Scene.GetHash(t.fileNamingAlgorithm) seconds := int(sceneMarker.Seconds) diff --git a/internal/manager/task_generate_phash.go b/internal/manager/task_generate_phash.go index 8ae84b02e03..9f3945da34c 100644 --- a/internal/manager/task_generate_phash.go +++ b/internal/manager/task_generate_phash.go @@ -4,7 +4,6 @@ import ( "context" "fmt" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/hash/videophash" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" @@ -12,11 +11,11 @@ import ( ) type GeneratePhashTask struct { - File *file.VideoFile + File *models.VideoFile Overwrite bool fileNamingAlgorithm models.HashAlgorithm txnManager txn.Manager - fileUpdater file.Updater + fileUpdater models.FileUpdater } func (t *GeneratePhashTask) GetDescription() string { @@ -38,8 +37,8 @@ func (t *GeneratePhashTask) Start(ctx context.Context) { if err := txn.WithTxn(ctx, t.txnManager, func(ctx context.Context) error { qb := t.fileUpdater 
hashValue := int64(*hash) - t.File.Fingerprints = t.File.Fingerprints.AppendUnique(file.Fingerprint{ - Type: file.FingerprintTypePhash, + t.File.Fingerprints = t.File.Fingerprints.AppendUnique(models.Fingerprint{ + Type: models.FingerprintTypePhash, Fingerprint: hashValue, }) @@ -54,5 +53,5 @@ func (t *GeneratePhashTask) required() bool { return true } - return t.File.Fingerprints.Get(file.FingerprintTypePhash) == nil + return t.File.Fingerprints.Get(models.FingerprintTypePhash) == nil } diff --git a/internal/manager/task_identify.go b/internal/manager/task_identify.go index f7ee5784cbd..0022a69ca31 100644 --- a/internal/manager/task_identify.go +++ b/internal/manager/task_identify.go @@ -136,7 +136,7 @@ func (j *IdentifyJob) identifyScene(ctx context.Context, s *models.Scene, source SceneReaderUpdater: instance.Repository.Scene, StudioReaderWriter: instance.Repository.Studio, PerformerCreator: instance.Repository.Performer, - TagCreatorFinder: instance.Repository.Tag, + TagFinderCreator: instance.Repository.Tag, DefaultOptions: j.input.Options, Sources: sources, diff --git a/internal/manager/task_import.go b/internal/manager/task_import.go index aa0e7ec6358..c0f97e254ae 100644 --- a/internal/manager/task_import.go +++ b/internal/manager/task_import.go @@ -10,6 +10,7 @@ import ( "path/filepath" "github.com/99designs/gqlgen/graphql" + "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/gallery" "github.com/stashapp/stash/pkg/image" @@ -281,7 +282,7 @@ func (t *ImportTask) ImportStudios(ctx context.Context) { logger.Info("[studios] import complete") } -func (t *ImportTask) ImportStudio(ctx context.Context, studioJSON *jsonschema.Studio, pendingParent map[string][]*jsonschema.Studio, readerWriter studio.NameFinderCreatorUpdater) error { +func (t *ImportTask) ImportStudio(ctx context.Context, studioJSON *jsonschema.Studio, pendingParent map[string][]*jsonschema.Studio, readerWriter studio.ImporterReaderWriter) 
error { importer := &studio.Importer{ ReaderWriter: readerWriter, Input: *studioJSON, @@ -385,7 +386,7 @@ func (t *ImportTask) ImportFiles(ctx context.Context) { if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error { return t.ImportFile(ctx, fileJSON, pendingParent) }); err != nil { - if errors.Is(err, errZipFileNotExist) { + if errors.Is(err, file.ErrZipFileNotExist) { // add to the pending parent list so that it is created after the parent s := pendingParent[fileJSON.DirEntry().ZipFile] s = append(s, fileJSON) @@ -421,7 +422,7 @@ func (t *ImportTask) ImportFile(ctx context.Context, fileJSON jsonschema.DirEntr r := t.txnManager readerWriter := r.File - fileImporter := &fileFolderImporter{ + fileImporter := &file.Importer{ ReaderWriter: readerWriter, FolderStore: r.Folder, Input: fileJSON, @@ -569,7 +570,7 @@ func (t *ImportTask) ImportTags(ctx context.Context) { logger.Info("[tags] import complete") } -func (t *ImportTask) ImportTag(ctx context.Context, tagJSON *jsonschema.Tag, pendingParent map[string][]*jsonschema.Tag, fail bool, readerWriter tag.NameFinderCreatorUpdater) error { +func (t *ImportTask) ImportTag(ctx context.Context, tagJSON *jsonschema.Tag, pendingParent map[string][]*jsonschema.Tag, fail bool, readerWriter tag.ImporterReaderWriter) error { importer := &tag.Importer{ ReaderWriter: readerWriter, Input: *tagJSON, diff --git a/internal/manager/task_scan.go b/internal/manager/task_scan.go index 7c5e2015641..f1f3e39272f 100644 --- a/internal/manager/task_scan.go +++ b/internal/manager/task_scan.go @@ -96,17 +96,17 @@ func newExtensionConfig(c *config.Instance) extensionConfig { } type fileCounter interface { - CountByFileID(ctx context.Context, fileID file.ID) (int, error) + CountByFileID(ctx context.Context, fileID models.FileID) (int, error) } type galleryFinder interface { fileCounter - FindByFolderID(ctx context.Context, folderID file.FolderID) ([]*models.Gallery, error) + FindByFolderID(ctx context.Context, folderID 
models.FolderID) ([]*models.Gallery, error) } type sceneFinder interface { fileCounter - FindByPrimaryFileID(ctx context.Context, fileID file.ID) ([]*models.Scene, error) + FindByPrimaryFileID(ctx context.Context, fileID models.FileID) ([]*models.Scene, error) } // handlerRequiredFilter returns true if a File's handler needs to be executed despite the file not being updated. @@ -139,7 +139,7 @@ func newHandlerRequiredFilter(c *config.Instance) *handlerRequiredFilter { } } -func (f *handlerRequiredFilter) Accept(ctx context.Context, ff file.File) bool { +func (f *handlerRequiredFilter) Accept(ctx context.Context, ff models.File) bool { path := ff.Base().Path isVideoFile := useAsVideo(path) isImageFile := useAsImage(path) @@ -213,7 +213,7 @@ func (f *handlerRequiredFilter) Accept(ctx context.Context, ff file.File) bool { // clean captions - scene handler handles this as well, but // unchanged files aren't processed by the scene handler - videoFile, _ := ff.(*file.VideoFile) + videoFile, _ := ff.(*models.VideoFile) if videoFile != nil { if err := video.CleanCaptions(ctx, videoFile, f.txnManager, f.CaptionUpdater); err != nil { logger.Errorf("Error cleaning captions: %v", err) @@ -370,7 +370,7 @@ type imageGenerators struct { progress *job.Progress } -func (g *imageGenerators) Generate(ctx context.Context, i *models.Image, f file.File) error { +func (g *imageGenerators) Generate(ctx context.Context, i *models.Image, f models.File) error { const overwrite = false progress := g.progress @@ -387,12 +387,12 @@ func (g *imageGenerators) Generate(ctx context.Context, i *models.Image, f file. 
} // avoid adding a task if the file isn't a video file - _, isVideo := f.(*file.VideoFile) + _, isVideo := f.(*models.VideoFile) if isVideo && t.ScanGenerateClipPreviews { // this is a bit of a hack: the task requires files to be loaded, but // we don't really need to since we already have the file ii := *i - ii.Files = models.NewRelatedFiles([]file.File{f}) + ii.Files = models.NewRelatedFiles([]models.File{f}) progress.AddTotal(1) previewsFn := func(ctx context.Context) { @@ -415,7 +415,7 @@ func (g *imageGenerators) Generate(ctx context.Context, i *models.Image, f file. return nil } -func (g *imageGenerators) generateThumbnail(ctx context.Context, i *models.Image, f file.File) error { +func (g *imageGenerators) generateThumbnail(ctx context.Context, i *models.Image, f models.File) error { thumbPath := GetInstance().Paths.Generated.GetThumbnailPath(i.Checksum, models.DefaultGthumbWidth) exists, _ := fsutil.FileExists(thumbPath) if exists { @@ -424,12 +424,12 @@ func (g *imageGenerators) generateThumbnail(ctx context.Context, i *models.Image path := f.Base().Path - asFrame, ok := f.(file.VisualFile) + vf, ok := f.(models.VisualFile) if !ok { - return fmt.Errorf("file %s does not implement Frame", path) + return fmt.Errorf("file %s is not a visual file", path) } - if asFrame.GetHeight() <= models.DefaultGthumbWidth && asFrame.GetWidth() <= models.DefaultGthumbWidth { + if vf.GetHeight() <= models.DefaultGthumbWidth && vf.GetWidth() <= models.DefaultGthumbWidth { return nil } @@ -466,7 +466,7 @@ type sceneGenerators struct { progress *job.Progress } -func (g *sceneGenerators) Generate(ctx context.Context, s *models.Scene, f *file.VideoFile) error { +func (g *sceneGenerators) Generate(ctx context.Context, s *models.Scene, f *models.VideoFile) error { const overwrite = false progress := g.progress diff --git a/pkg/ffmpeg/stream_segmented.go b/pkg/ffmpeg/stream_segmented.go index fa7347582a9..68e6f42822b 100644 --- a/pkg/ffmpeg/stream_segmented.go +++ 
b/pkg/ffmpeg/stream_segmented.go @@ -16,7 +16,6 @@ import ( "sync/atomic" "time" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" @@ -51,7 +50,7 @@ const ( type StreamType struct { Name string SegmentType *SegmentType - ServeManifest func(sm *StreamManager, w http.ResponseWriter, r *http.Request, vf *file.VideoFile, resolution string) + ServeManifest func(sm *StreamManager, w http.ResponseWriter, r *http.Request, vf *models.VideoFile, resolution string) Args func(codec VideoCodec, segment int, videoFilter VideoFilter, videoOnly bool, outputDir string) Args } @@ -250,7 +249,7 @@ var ErrInvalidSegment = errors.New("invalid segment") type StreamOptions struct { StreamType *StreamType - VideoFile *file.VideoFile + VideoFile *models.VideoFile Resolution string Hash string Segment string @@ -279,7 +278,7 @@ type waitingSegment struct { type runningStream struct { dir string streamType *StreamType - vf *file.VideoFile + vf *models.VideoFile maxTranscodeSize int outputDir string @@ -394,7 +393,7 @@ func (tp *transcodeProcess) checkSegments() { } } -func lastSegment(vf *file.VideoFile) int { +func lastSegment(vf *models.VideoFile) int { return int(math.Ceil(vf.Duration/segmentLength)) - 1 } @@ -405,7 +404,7 @@ func segmentExists(path string) bool { // serveHLSManifest serves a generated HLS playlist. The URLs for the segments // are of the form {r.URL}/%d.ts{?urlQuery} where %d is the segment index. 
-func serveHLSManifest(sm *StreamManager, w http.ResponseWriter, r *http.Request, vf *file.VideoFile, resolution string) { +func serveHLSManifest(sm *StreamManager, w http.ResponseWriter, r *http.Request, vf *models.VideoFile, resolution string) { if sm.cacheDir == "" { logger.Error("[transcode] cannot live transcode with HLS because cache dir is unset") http.Error(w, "cannot live transcode with HLS because cache dir is unset", http.StatusServiceUnavailable) @@ -460,7 +459,7 @@ func serveHLSManifest(sm *StreamManager, w http.ResponseWriter, r *http.Request, } // serveDASHManifest serves a generated DASH manifest. -func serveDASHManifest(sm *StreamManager, w http.ResponseWriter, r *http.Request, vf *file.VideoFile, resolution string) { +func serveDASHManifest(sm *StreamManager, w http.ResponseWriter, r *http.Request, vf *models.VideoFile, resolution string) { if sm.cacheDir == "" { logger.Error("[transcode] cannot live transcode with DASH because cache dir is unset") http.Error(w, "cannot live transcode files with DASH because cache dir is unset", http.StatusServiceUnavailable) @@ -550,7 +549,7 @@ func serveDASHManifest(sm *StreamManager, w http.ResponseWriter, r *http.Request utils.ServeStaticContent(w, r, buf.Bytes()) } -func (sm *StreamManager) ServeManifest(w http.ResponseWriter, r *http.Request, streamType *StreamType, vf *file.VideoFile, resolution string) { +func (sm *StreamManager) ServeManifest(w http.ResponseWriter, r *http.Request, streamType *StreamType, vf *models.VideoFile, resolution string) { streamType.ServeManifest(sm, w, r, vf, resolution) } diff --git a/pkg/ffmpeg/stream_transcode.go b/pkg/ffmpeg/stream_transcode.go index cd123183ffb..c5593ab8bf8 100644 --- a/pkg/ffmpeg/stream_transcode.go +++ b/pkg/ffmpeg/stream_transcode.go @@ -8,7 +8,6 @@ import ( "strings" "syscall" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" @@ -134,7 +133,7 @@ var 
( type TranscodeOptions struct { StreamType StreamFormat - VideoFile *file.VideoFile + VideoFile *models.VideoFile Resolution string StartTime float64 } diff --git a/pkg/file/clean.go b/pkg/file/clean.go index 44470c5a093..d3e27a774a2 100644 --- a/pkg/file/clean.go +++ b/pkg/file/clean.go @@ -10,12 +10,13 @@ import ( "github.com/stashapp/stash/pkg/job" "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/txn" ) // Cleaner scans through stored file and folder instances and removes those that are no longer present on disk. type Cleaner struct { - FS FS + FS models.FS Repository Repository Handlers []CleanHandler @@ -55,44 +56,44 @@ func (s *Cleaner) Clean(ctx context.Context, options CleanOptions, progress *job } type fileOrFolder struct { - fileID ID - folderID FolderID + fileID models.FileID + folderID models.FolderID } type deleteSet struct { orderedList []fileOrFolder - fileIDSet map[ID]string + fileIDSet map[models.FileID]string - folderIDSet map[FolderID]string + folderIDSet map[models.FolderID]string } func newDeleteSet() deleteSet { return deleteSet{ - fileIDSet: make(map[ID]string), - folderIDSet: make(map[FolderID]string), + fileIDSet: make(map[models.FileID]string), + folderIDSet: make(map[models.FolderID]string), } } -func (s *deleteSet) add(id ID, path string) { +func (s *deleteSet) add(id models.FileID, path string) { if _, ok := s.fileIDSet[id]; !ok { s.orderedList = append(s.orderedList, fileOrFolder{fileID: id}) s.fileIDSet[id] = path } } -func (s *deleteSet) has(id ID) bool { +func (s *deleteSet) has(id models.FileID) bool { _, ok := s.fileIDSet[id] return ok } -func (s *deleteSet) addFolder(id FolderID, path string) { +func (s *deleteSet) addFolder(id models.FolderID, path string) { if _, ok := s.folderIDSet[id]; !ok { s.orderedList = append(s.orderedList, fileOrFolder{folderID: id}) s.folderIDSet[id] = path } } -func (s *deleteSet) hasFolder(id FolderID) bool { +func (s *deleteSet) 
hasFolder(id models.FolderID) bool { _, ok := s.folderIDSet[id] return ok } @@ -113,7 +114,7 @@ func (j *cleanJob) execute(ctx context.Context) error { if err := txn.WithReadTxn(ctx, j.Repository, func(ctx context.Context) error { var err error - fileCount, err = j.Repository.CountAllInPaths(ctx, j.options.Paths) + fileCount, err = j.Repository.FileStore.CountAllInPaths(ctx, j.options.Paths) if err != nil { return err } @@ -177,7 +178,7 @@ func (j *cleanJob) assessFiles(ctx context.Context, toDelete *deleteSet) error { return nil } - files, err := j.Repository.FindAllInPaths(ctx, j.options.Paths, batchSize, offset) + files, err := j.Repository.FileStore.FindAllInPaths(ctx, j.options.Paths, batchSize, offset) if err != nil { return fmt.Errorf("error querying for files: %w", err) } @@ -221,9 +222,9 @@ func (j *cleanJob) assessFiles(ctx context.Context, toDelete *deleteSet) error { } // flagFolderForDelete adds folders to the toDelete set, with the leaf folders added first -func (j *cleanJob) flagFileForDelete(ctx context.Context, toDelete *deleteSet, f File) error { +func (j *cleanJob) flagFileForDelete(ctx context.Context, toDelete *deleteSet, f models.File) error { // add contained files first - containedFiles, err := j.Repository.FindByZipFileID(ctx, f.Base().ID) + containedFiles, err := j.Repository.FileStore.FindByZipFileID(ctx, f.Base().ID) if err != nil { return fmt.Errorf("error finding contained files for %q: %w", f.Base().Path, err) } @@ -306,7 +307,7 @@ func (j *cleanJob) assessFolders(ctx context.Context, toDelete *deleteSet) error return nil } -func (j *cleanJob) flagFolderForDelete(ctx context.Context, toDelete *deleteSet, folder *Folder) error { +func (j *cleanJob) flagFolderForDelete(ctx context.Context, toDelete *deleteSet, folder *models.Folder) error { // it is possible that child folders may be included while parent folders are not // so we need to check child folders separately toDelete.addFolder(folder.ID, folder.Path) @@ -314,7 +315,7 @@ func 
(j *cleanJob) flagFolderForDelete(ctx context.Context, toDelete *deleteSet, return nil } -func (j *cleanJob) shouldClean(ctx context.Context, f File) bool { +func (j *cleanJob) shouldClean(ctx context.Context, f models.File) bool { path := f.Base().Path info, err := f.Base().Info(j.FS) @@ -336,7 +337,7 @@ func (j *cleanJob) shouldClean(ctx context.Context, f File) bool { return !filter.Accept(ctx, path, info) } -func (j *cleanJob) shouldCleanFolder(ctx context.Context, f *Folder) bool { +func (j *cleanJob) shouldCleanFolder(ctx context.Context, f *models.Folder) bool { path := f.Path info, err := f.Info(j.FS) @@ -376,7 +377,7 @@ func (j *cleanJob) shouldCleanFolder(ctx context.Context, f *Folder) bool { return !filter.Accept(ctx, path, info) } -func (j *cleanJob) deleteFile(ctx context.Context, fileID ID, fn string) { +func (j *cleanJob) deleteFile(ctx context.Context, fileID models.FileID, fn string) { // delete associated objects fileDeleter := NewDeleter() if err := txn.WithTxn(ctx, j.Repository, func(ctx context.Context) error { @@ -386,14 +387,14 @@ func (j *cleanJob) deleteFile(ctx context.Context, fileID ID, fn string) { return err } - return j.Repository.Destroy(ctx, fileID) + return j.Repository.FileStore.Destroy(ctx, fileID) }); err != nil { logger.Errorf("Error deleting file %q from database: %s", fn, err.Error()) return } } -func (j *cleanJob) deleteFolder(ctx context.Context, folderID FolderID, fn string) { +func (j *cleanJob) deleteFolder(ctx context.Context, folderID models.FolderID, fn string) { // delete associated objects fileDeleter := NewDeleter() if err := txn.WithTxn(ctx, j.Repository, func(ctx context.Context) error { @@ -410,7 +411,7 @@ func (j *cleanJob) deleteFolder(ctx context.Context, folderID FolderID, fn strin } } -func (j *cleanJob) fireHandlers(ctx context.Context, fileDeleter *Deleter, fileID ID) error { +func (j *cleanJob) fireHandlers(ctx context.Context, fileDeleter *Deleter, fileID models.FileID) error { for _, h := range 
j.Handlers { if err := h.HandleFile(ctx, fileDeleter, fileID); err != nil { return err @@ -420,7 +421,7 @@ func (j *cleanJob) fireHandlers(ctx context.Context, fileDeleter *Deleter, fileI return nil } -func (j *cleanJob) fireFolderHandlers(ctx context.Context, fileDeleter *Deleter, folderID FolderID) error { +func (j *cleanJob) fireFolderHandlers(ctx context.Context, fileDeleter *Deleter, folderID models.FolderID) error { for _, h := range j.Handlers { if err := h.HandleFolder(ctx, fileDeleter, folderID); err != nil { return err diff --git a/pkg/file/delete.go b/pkg/file/delete.go index 9ee27c1767d..88eb5169eac 100644 --- a/pkg/file/delete.go +++ b/pkg/file/delete.go @@ -9,6 +9,7 @@ import ( "github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/txn" ) @@ -179,7 +180,7 @@ func (d *Deleter) renameForRestore(path string) error { return d.RenamerRemover.Rename(path+deleteFileSuffix, path) } -func Destroy(ctx context.Context, destroyer Destroyer, f File, fileDeleter *Deleter, deleteFile bool) error { +func Destroy(ctx context.Context, destroyer models.FileDestroyer, f models.File, fileDeleter *Deleter, deleteFile bool) error { if err := destroyer.Destroy(ctx, f.Base().ID); err != nil { return err } @@ -195,11 +196,11 @@ func Destroy(ctx context.Context, destroyer Destroyer, f File, fileDeleter *Dele } type ZipDestroyer struct { - FileDestroyer GetterDestroyer - FolderDestroyer FolderGetterDestroyer + FileDestroyer models.FileFinderDestroyer + FolderDestroyer models.FolderFinderDestroyer } -func (d *ZipDestroyer) DestroyZip(ctx context.Context, f File, fileDeleter *Deleter, deleteFile bool) error { +func (d *ZipDestroyer) DestroyZip(ctx context.Context, f models.File, fileDeleter *Deleter, deleteFile bool) error { // destroy contained files files, err := d.FileDestroyer.FindByZipFileID(ctx, f.Base().ID) if err != nil { diff --git a/pkg/file/file.go b/pkg/file/file.go index 
50a2d613868..179e1e01af7 100644 --- a/pkg/file/file.go +++ b/pkg/file/file.go @@ -1,225 +1,15 @@ package file import ( - "bytes" - "context" - "io" - "io/fs" - "net/http" - "strconv" - "time" + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/txn" ) -// ID represents an ID of a file. -type ID int32 +// Repository provides access to storage methods for files and folders. +type Repository struct { + txn.Manager + txn.DatabaseProvider -func (i ID) String() string { - return strconv.Itoa(int(i)) -} - -// DirEntry represents a file or directory in the file system. -type DirEntry struct { - ZipFileID *ID `json:"zip_file_id"` - - // transient - not persisted - // only guaranteed to have id, path and basename set - ZipFile File - - ModTime time.Time `json:"mod_time"` -} - -func (e *DirEntry) info(fs FS, path string) (fs.FileInfo, error) { - if e.ZipFile != nil { - zipPath := e.ZipFile.Base().Path - zfs, err := fs.OpenZip(zipPath) - if err != nil { - return nil, err - } - defer zfs.Close() - fs = zfs - } - // else assume os file - - ret, err := fs.Lstat(path) - return ret, err -} - -// File represents a file in the file system. -type File interface { - Base() *BaseFile - SetFingerprints(fp Fingerprints) - Open(fs FS) (io.ReadCloser, error) -} - -// BaseFile represents a file in the file system. -type BaseFile struct { - ID ID `json:"id"` - - DirEntry - - // resolved from parent folder and basename only - not stored in DB - Path string `json:"path"` - - Basename string `json:"basename"` - ParentFolderID FolderID `json:"parent_folder_id"` - - Fingerprints Fingerprints `json:"fingerprints"` - - Size int64 `json:"size"` - - CreatedAt time.Time `json:"created_at"` - UpdatedAt time.Time `json:"updated_at"` -} - -// SetFingerprints sets the fingerprints of the file. -// If a fingerprint of the same type already exists, it is overwritten. 
-func (f *BaseFile) SetFingerprints(fp Fingerprints) { - for _, v := range fp { - f.SetFingerprint(v) - } -} - -// SetFingerprint sets the fingerprint of the file. -// If a fingerprint of the same type already exists, it is overwritten. -func (f *BaseFile) SetFingerprint(fp Fingerprint) { - for i, existing := range f.Fingerprints { - if existing.Type == fp.Type { - f.Fingerprints[i] = fp - return - } - } - - f.Fingerprints = append(f.Fingerprints, fp) -} - -// Base is used to fulfil the File interface. -func (f *BaseFile) Base() *BaseFile { - return f -} - -func (f *BaseFile) Open(fs FS) (io.ReadCloser, error) { - if f.ZipFile != nil { - zipPath := f.ZipFile.Base().Path - zfs, err := fs.OpenZip(zipPath) - if err != nil { - return nil, err - } - - return zfs.OpenOnly(f.Path) - } - - return fs.Open(f.Path) -} - -func (f *BaseFile) Info(fs FS) (fs.FileInfo, error) { - return f.info(fs, f.Path) -} - -func (f *BaseFile) Serve(fs FS, w http.ResponseWriter, r *http.Request) error { - reader, err := f.Open(fs) - if err != nil { - return err - } - - defer reader.Close() - - content, ok := reader.(io.ReadSeeker) - if !ok { - data, err := io.ReadAll(reader) - if err != nil { - return err - } - content = bytes.NewReader(data) - } - - if r.URL.Query().Has("t") { - w.Header().Set("Cache-Control", "private, max-age=31536000, immutable") - } else { - w.Header().Set("Cache-Control", "no-cache") - } - http.ServeContent(w, r, f.Basename, f.ModTime, content) - - return nil -} - -type Finder interface { - Find(ctx context.Context, id ...ID) ([]File, error) -} - -// Getter provides methods to find Files. 
-type Getter interface { - Finder - FindByPath(ctx context.Context, path string) (File, error) - FindAllByPath(ctx context.Context, path string) ([]File, error) - FindByFingerprint(ctx context.Context, fp Fingerprint) ([]File, error) - FindByZipFileID(ctx context.Context, zipFileID ID) ([]File, error) - FindAllInPaths(ctx context.Context, p []string, limit, offset int) ([]File, error) - FindByFileInfo(ctx context.Context, info fs.FileInfo, size int64) ([]File, error) -} - -type Counter interface { - CountAllInPaths(ctx context.Context, p []string) (int, error) - CountByFolderID(ctx context.Context, folderID FolderID) (int, error) -} - -// Creator provides methods to create Files. -type Creator interface { - Create(ctx context.Context, f File) error -} - -// Updater provides methods to update Files. -type Updater interface { - Update(ctx context.Context, f File) error -} - -type Destroyer interface { - Destroy(ctx context.Context, id ID) error -} - -type GetterUpdater interface { - Getter - Updater -} - -type GetterDestroyer interface { - Getter - Destroyer -} - -// Store provides methods to find, create and update Files. -type Store interface { - Getter - Counter - Creator - Updater - Destroyer - - IsPrimary(ctx context.Context, fileID ID) (bool, error) -} - -// Decorator wraps the Decorate method to add additional functionality while scanning files. -type Decorator interface { - Decorate(ctx context.Context, fs FS, f File) (File, error) - IsMissingMetadata(ctx context.Context, fs FS, f File) bool -} - -type FilteredDecorator struct { - Decorator - Filter -} - -// Decorate runs the decorator if the filter accepts the file. 
-func (d *FilteredDecorator) Decorate(ctx context.Context, fs FS, f File) (File, error) { - if d.Accept(ctx, f) { - return d.Decorator.Decorate(ctx, fs, f) - } - return f, nil -} - -func (d *FilteredDecorator) IsMissingMetadata(ctx context.Context, fs FS, f File) bool { - if d.Accept(ctx, f) { - return d.Decorator.IsMissingMetadata(ctx, fs, f) - } - - return false + FileStore models.FileReaderWriter + FolderStore models.FolderReaderWriter } diff --git a/pkg/file/folder.go b/pkg/file/folder.go index 5ffd7f2b557..02087dd4117 100644 --- a/pkg/file/folder.go +++ b/pkg/file/folder.go @@ -3,94 +3,16 @@ package file import ( "context" "fmt" - "io/fs" "path/filepath" - "strconv" "strings" "time" -) - -// FolderID represents an ID of a folder. -type FolderID int32 - -// String converts the ID to a string. -func (i FolderID) String() string { - return strconv.Itoa(int(i)) -} - -// Folder represents a folder in the file system. -type Folder struct { - ID FolderID `json:"id"` - DirEntry - Path string `json:"path"` - ParentFolderID *FolderID `json:"parent_folder_id"` - - CreatedAt time.Time `json:"created_at"` - UpdatedAt time.Time `json:"updated_at"` -} - -func (f *Folder) Info(fs FS) (fs.FileInfo, error) { - return f.info(fs, f.Path) -} - -type FolderFinder interface { - Find(ctx context.Context, id FolderID) (*Folder, error) -} - -// FolderPathFinder finds Folders by their path. -type FolderPathFinder interface { - FindByPath(ctx context.Context, path string) (*Folder, error) -} - -// FolderGetter provides methods to find Folders. 
-type FolderGetter interface { - FolderFinder - FolderPathFinder - FindByZipFileID(ctx context.Context, zipFileID ID) ([]*Folder, error) - FindAllInPaths(ctx context.Context, p []string, limit, offset int) ([]*Folder, error) - FindByParentFolderID(ctx context.Context, parentFolderID FolderID) ([]*Folder, error) -} -type FolderCounter interface { - CountAllInPaths(ctx context.Context, p []string) (int, error) -} - -// FolderCreator provides methods to create Folders. -type FolderCreator interface { - Create(ctx context.Context, f *Folder) error -} - -type FolderFinderCreator interface { - FolderPathFinder - FolderCreator -} - -// FolderUpdater provides methods to update Folders. -type FolderUpdater interface { - Update(ctx context.Context, f *Folder) error -} - -type FolderDestroyer interface { - Destroy(ctx context.Context, id FolderID) error -} - -type FolderGetterDestroyer interface { - FolderGetter - FolderDestroyer -} - -// FolderStore provides methods to find, create and update Folders. -type FolderStore interface { - FolderGetter - FolderCounter - FolderCreator - FolderUpdater - FolderDestroyer -} + "github.com/stashapp/stash/pkg/models" +) // GetOrCreateFolderHierarchy gets the folder for the given path, or creates a folder hierarchy for the given path if one if no existing folder is found. 
// Does not create any folders in the file system -func GetOrCreateFolderHierarchy(ctx context.Context, fc FolderFinderCreator, path string) (*Folder, error) { +func GetOrCreateFolderHierarchy(ctx context.Context, fc models.FolderFinderCreator, path string) (*models.Folder, error) { // get or create folder hierarchy folder, err := fc.FindByPath(ctx, path) if err != nil { @@ -106,10 +28,10 @@ func GetOrCreateFolderHierarchy(ctx context.Context, fc FolderFinderCreator, pat now := time.Now() - folder = &Folder{ + folder = &models.Folder{ Path: path, ParentFolderID: &parent.ID, - DirEntry: DirEntry{ + DirEntry: models.DirEntry{ // leave mod time empty for now - it will be updated when the folder is scanned }, CreatedAt: now, @@ -126,7 +48,7 @@ func GetOrCreateFolderHierarchy(ctx context.Context, fc FolderFinderCreator, pat // TransferZipFolderHierarchy creates the folder hierarchy for zipFileID under newPath, and removes // ZipFileID from folders under oldPath. -func TransferZipFolderHierarchy(ctx context.Context, folderStore FolderStore, zipFileID ID, oldPath string, newPath string) error { +func TransferZipFolderHierarchy(ctx context.Context, folderStore models.FolderReaderWriter, zipFileID models.FileID, oldPath string, newPath string) error { zipFolders, err := folderStore.FindByZipFileID(ctx, zipFileID) if err != nil { return err diff --git a/pkg/file/folder_rename_detect.go b/pkg/file/folder_rename_detect.go index 0e52eb7854c..0b57d9c087a 100644 --- a/pkg/file/folder_rename_detect.go +++ b/pkg/file/folder_rename_detect.go @@ -7,27 +7,28 @@ import ( "io/fs" "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" ) type folderRenameCandidate struct { - folder *Folder + folder *models.Folder found int files int } type folderRenameDetector struct { // candidates is a map of folder id to the number of files that match - candidates map[FolderID]folderRenameCandidate + candidates map[models.FolderID]folderRenameCandidate // rejects is a set of 
folder ids which were found to still exist - rejects map[FolderID]struct{} + rejects map[models.FolderID]struct{} } -func (d *folderRenameDetector) isReject(id FolderID) bool { +func (d *folderRenameDetector) isReject(id models.FolderID) bool { _, ok := d.rejects[id] return ok } -func (d *folderRenameDetector) getCandidate(id FolderID) *folderRenameCandidate { +func (d *folderRenameDetector) getCandidate(id models.FolderID) *folderRenameCandidate { c, ok := d.candidates[id] if !ok { return nil @@ -40,14 +41,14 @@ func (d *folderRenameDetector) setCandidate(c folderRenameCandidate) { d.candidates[c.folder.ID] = c } -func (d *folderRenameDetector) reject(id FolderID) { +func (d *folderRenameDetector) reject(id models.FolderID) { d.rejects[id] = struct{}{} } // bestCandidate returns the folder that is the best candidate for a rename. // This is the folder that has the largest number of its original files that // are still present in the new location. -func (d *folderRenameDetector) bestCandidate() *Folder { +func (d *folderRenameDetector) bestCandidate() *models.Folder { if len(d.candidates) == 0 { return nil } @@ -74,14 +75,14 @@ func (d *folderRenameDetector) bestCandidate() *Folder { return best.folder } -func (s *scanJob) detectFolderMove(ctx context.Context, file scanFile) (*Folder, error) { +func (s *scanJob) detectFolderMove(ctx context.Context, file scanFile) (*models.Folder, error) { // in order for a folder to be considered moved, the existing folder must be // missing, and the majority of the old folder's files must be present, unchanged, // in the new folder. 
detector := folderRenameDetector{ - candidates: make(map[FolderID]folderRenameCandidate), - rejects: make(map[FolderID]struct{}), + candidates: make(map[models.FolderID]folderRenameCandidate), + rejects: make(map[models.FolderID]struct{}), } // rejects is a set of folder ids which were found to still exist @@ -117,7 +118,7 @@ func (s *scanJob) detectFolderMove(ctx context.Context, file scanFile) (*Folder, } // check if the file exists in the database based on basename, size and mod time - existing, err := s.Repository.Store.FindByFileInfo(ctx, info, size) + existing, err := s.Repository.FileStore.FindByFileInfo(ctx, info, size) if err != nil { return fmt.Errorf("checking for existing file %q: %w", path, err) } @@ -163,7 +164,7 @@ func (s *scanJob) detectFolderMove(ctx context.Context, file scanFile) (*Folder, // parent folder is missing, possible candidate // count the total number of files in the existing folder - count, err := s.Repository.Store.CountByFolderID(ctx, parentFolderID) + count, err := s.Repository.FileStore.CountByFolderID(ctx, parentFolderID) if err != nil { return fmt.Errorf("counting files in folder %d: %w", parentFolderID, err) } diff --git a/pkg/file/frame.go b/pkg/file/frame.go deleted file mode 100644 index de9f7466233..00000000000 --- a/pkg/file/frame.go +++ /dev/null @@ -1,20 +0,0 @@ -package file - -// VisualFile is an interface for files that have a width and height. -type VisualFile interface { - File - GetWidth() int - GetHeight() int - GetFormat() string -} - -func GetMinResolution(f VisualFile) int { - w := f.GetWidth() - h := f.GetHeight() - - if w < h { - return w - } - - return h -} diff --git a/pkg/file/fs.go b/pkg/file/fs.go index 09c7c7c8e19..80148cfa126 100644 --- a/pkg/file/fs.go +++ b/pkg/file/fs.go @@ -6,6 +6,7 @@ import ( "os" "github.com/stashapp/stash/pkg/fsutil" + "github.com/stashapp/stash/pkg/models" ) // Opener provides an interface to open a file. 
@@ -14,7 +15,7 @@ type Opener interface { } type fsOpener struct { - fs FS + fs models.FS name string } @@ -22,15 +23,6 @@ func (o *fsOpener) Open() (io.ReadCloser, error) { return o.fs.Open(o.name) } -// FS represents a file system. -type FS interface { - Stat(name string) (fs.FileInfo, error) - Lstat(name string) (fs.FileInfo, error) - Open(name string) (fs.ReadDirFile, error) - OpenZip(name string) (*ZipFS, error) - IsPathCaseSensitive(path string) (bool, error) -} - // OsFS is a file system backed by the OS. type OsFS struct{} @@ -66,7 +58,7 @@ func (f *OsFS) Open(name string) (fs.ReadDirFile, error) { return os.Open(name) } -func (f *OsFS) OpenZip(name string) (*ZipFS, error) { +func (f *OsFS) OpenZip(name string) (models.ZipFS, error) { info, err := f.Lstat(name) if err != nil { return nil, err diff --git a/pkg/file/handler.go b/pkg/file/handler.go index 5932968b65e..10616eefa50 100644 --- a/pkg/file/handler.go +++ b/pkg/file/handler.go @@ -3,6 +3,8 @@ package file import ( "context" "io/fs" + + "github.com/stashapp/stash/pkg/models" ) // PathFilter provides a filter function for paths. @@ -18,18 +20,18 @@ func (pff PathFilterFunc) Accept(path string) bool { // Filter provides a filter function for Files. type Filter interface { - Accept(ctx context.Context, f File) bool + Accept(ctx context.Context, f models.File) bool } -type FilterFunc func(ctx context.Context, f File) bool +type FilterFunc func(ctx context.Context, f models.File) bool -func (ff FilterFunc) Accept(ctx context.Context, f File) bool { +func (ff FilterFunc) Accept(ctx context.Context, f models.File) bool { return ff(ctx, f) } // Handler provides a handler for Files. type Handler interface { - Handle(ctx context.Context, f File, oldFile File) error + Handle(ctx context.Context, f models.File, oldFile models.File) error } // FilteredHandler is a Handler runs only if the filter accepts the file. 
@@ -39,7 +41,7 @@ type FilteredHandler struct { } // Handle runs the handler if the filter accepts the file. -func (h *FilteredHandler) Handle(ctx context.Context, f File, oldFile File) error { +func (h *FilteredHandler) Handle(ctx context.Context, f models.File, oldFile models.File) error { if h.Accept(ctx, f) { return h.Handler.Handle(ctx, f, oldFile) } @@ -48,6 +50,6 @@ func (h *FilteredHandler) Handle(ctx context.Context, f File, oldFile File) erro // CleanHandler provides a handler for cleaning Files and Folders. type CleanHandler interface { - HandleFile(ctx context.Context, fileDeleter *Deleter, fileID ID) error - HandleFolder(ctx context.Context, fileDeleter *Deleter, folderID FolderID) error + HandleFile(ctx context.Context, fileDeleter *Deleter, fileID models.FileID) error + HandleFolder(ctx context.Context, fileDeleter *Deleter, folderID models.FolderID) error } diff --git a/pkg/file/image/scan.go b/pkg/file/image/scan.go index 5203adba9e2..ba22bbee988 100644 --- a/pkg/file/image/scan.go +++ b/pkg/file/image/scan.go @@ -13,6 +13,7 @@ import ( "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/file/video" "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" _ "golang.org/x/image/webp" ) @@ -21,10 +22,10 @@ type Decorator struct { FFProbe ffmpeg.FFProbe } -func (d *Decorator) Decorate(ctx context.Context, fs file.FS, f file.File) (file.File, error) { +func (d *Decorator) Decorate(ctx context.Context, fs models.FS, f models.File) (models.File, error) { base := f.Base() - decorateFallback := func() (file.File, error) { + decorateFallback := func() (models.File, error) { r, err := fs.Open(base.Path) if err != nil { return f, fmt.Errorf("reading image file %q: %w", base.Path, err) @@ -35,7 +36,7 @@ func (d *Decorator) Decorate(ctx context.Context, fs file.FS, f file.File) (file if err != nil { return f, fmt.Errorf("decoding image file %q: %w", base.Path, err) } - return &file.ImageFile{ + return &models.ImageFile{ 
BaseFile: base, Format: format, Width: c.Width, @@ -58,7 +59,7 @@ func (d *Decorator) Decorate(ctx context.Context, fs file.FS, f file.File) (file // Fallback to catch non-animated avif images that FFProbe detects as video files if probe.Bitrate == 0 && probe.VideoCodec == "av1" { - return &file.ImageFile{ + return &models.ImageFile{ BaseFile: base, Format: "avif", Width: probe.Width, @@ -78,7 +79,7 @@ func (d *Decorator) Decorate(ctx context.Context, fs file.FS, f file.File) (file return videoFileDecorator.Decorate(ctx, fs, f) } - return &file.ImageFile{ + return &models.ImageFile{ BaseFile: base, Format: probe.VideoCodec, Width: probe.Width, @@ -86,14 +87,14 @@ func (d *Decorator) Decorate(ctx context.Context, fs file.FS, f file.File) (file }, nil } -func (d *Decorator) IsMissingMetadata(ctx context.Context, fs file.FS, f file.File) bool { +func (d *Decorator) IsMissingMetadata(ctx context.Context, fs models.FS, f models.File) bool { const ( unsetString = "unset" unsetNumber = -1 ) - imf, isImage := f.(*file.ImageFile) - vf, isVideo := f.(*file.VideoFile) + imf, isImage := f.(*models.ImageFile) + vf, isVideo := f.(*models.VideoFile) switch { case isImage: diff --git a/pkg/file/image_file.go b/pkg/file/image_file.go deleted file mode 100644 index 0de2d9b9871..00000000000 --- a/pkg/file/image_file.go +++ /dev/null @@ -1,21 +0,0 @@ -package file - -// ImageFile is an extension of BaseFile to represent image files. 
-type ImageFile struct { - *BaseFile - Format string `json:"format"` - Width int `json:"width"` - Height int `json:"height"` -} - -func (f ImageFile) GetWidth() int { - return f.Width -} - -func (f ImageFile) GetHeight() int { - return f.Height -} - -func (f ImageFile) GetFormat() string { - return f.Format -} diff --git a/internal/manager/import_file.go b/pkg/file/import.go similarity index 68% rename from internal/manager/import_file.go rename to pkg/file/import.go index bad9d5bce0f..0af94a4d211 100644 --- a/internal/manager/import_file.go +++ b/pkg/file/import.go @@ -1,4 +1,4 @@ -package manager +package file import ( "context" @@ -7,24 +7,22 @@ import ( "path/filepath" "time" - "github.com/stashapp/stash/pkg/file" + "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/jsonschema" ) -// HACK: this is all here because of an import loop in jsonschema -> models -> file +var ErrZipFileNotExist = errors.New("zip file does not exist") -var errZipFileNotExist = errors.New("zip file does not exist") - -type fileFolderImporter struct { - ReaderWriter file.Store - FolderStore file.FolderStore +type Importer struct { + ReaderWriter models.FileFinderCreator + FolderStore models.FolderFinderCreator Input jsonschema.DirEntry - file file.File - folder *file.Folder + file models.File + folder *models.Folder } -func (i *fileFolderImporter) PreImport(ctx context.Context) error { +func (i *Importer) PreImport(ctx context.Context) error { var err error switch ff := i.Input.(type) { @@ -37,9 +35,9 @@ func (i *fileFolderImporter) PreImport(ctx context.Context) error { return err } -func (i *fileFolderImporter) folderJSONToFolder(ctx context.Context, baseJSON *jsonschema.BaseDirEntry) (*file.Folder, error) { - ret := file.Folder{ - DirEntry: file.DirEntry{ +func (i *Importer) folderJSONToFolder(ctx context.Context, baseJSON *jsonschema.BaseDirEntry) (*models.Folder, error) { + ret := models.Folder{ + DirEntry: models.DirEntry{ ModTime: 
baseJSON.ModTime.GetTime(), }, Path: baseJSON.Path, @@ -56,14 +54,14 @@ func (i *fileFolderImporter) folderJSONToFolder(ctx context.Context, baseJSON *j return &ret, nil } -func (i *fileFolderImporter) fileJSONToFile(ctx context.Context, fileJSON jsonschema.DirEntry) (file.File, error) { +func (i *Importer) fileJSONToFile(ctx context.Context, fileJSON jsonschema.DirEntry) (models.File, error) { switch ff := fileJSON.(type) { case *jsonschema.VideoFile: baseFile, err := i.baseFileJSONToBaseFile(ctx, ff.BaseFile) if err != nil { return nil, err } - return &file.VideoFile{ + return &models.VideoFile{ BaseFile: baseFile, Format: ff.Format, Width: ff.Width, @@ -81,7 +79,7 @@ func (i *fileFolderImporter) fileJSONToFile(ctx context.Context, fileJSON jsonsc if err != nil { return nil, err } - return &file.ImageFile{ + return &models.ImageFile{ BaseFile: baseFile, Format: ff.Format, Width: ff.Width, @@ -94,9 +92,9 @@ func (i *fileFolderImporter) fileJSONToFile(ctx context.Context, fileJSON jsonsc return nil, fmt.Errorf("unknown file type") } -func (i *fileFolderImporter) baseFileJSONToBaseFile(ctx context.Context, baseJSON *jsonschema.BaseFile) (*file.BaseFile, error) { - baseFile := file.BaseFile{ - DirEntry: file.DirEntry{ +func (i *Importer) baseFileJSONToBaseFile(ctx context.Context, baseJSON *jsonschema.BaseFile) (*models.BaseFile, error) { + baseFile := models.BaseFile{ + DirEntry: models.DirEntry{ ModTime: baseJSON.ModTime.GetTime(), }, Basename: filepath.Base(baseJSON.Path), @@ -106,7 +104,7 @@ func (i *fileFolderImporter) baseFileJSONToBaseFile(ctx context.Context, baseJSO } for _, fp := range baseJSON.Fingerprints { - baseFile.Fingerprints = append(baseFile.Fingerprints, file.Fingerprint{ + baseFile.Fingerprints = append(baseFile.Fingerprints, models.Fingerprint{ Type: fp.Type, Fingerprint: fp.Fingerprint, }) @@ -119,7 +117,7 @@ func (i *fileFolderImporter) baseFileJSONToBaseFile(ctx context.Context, baseJSO return &baseFile, nil } -func (i *fileFolderImporter) 
populateZipFileID(ctx context.Context, f *file.DirEntry) error { +func (i *Importer) populateZipFileID(ctx context.Context, f *models.DirEntry) error { zipFilePath := i.Input.DirEntry().ZipFile if zipFilePath != "" { zf, err := i.ReaderWriter.FindByPath(ctx, zipFilePath) @@ -128,7 +126,7 @@ func (i *fileFolderImporter) populateZipFileID(ctx context.Context, f *file.DirE } if zf == nil { - return errZipFileNotExist + return ErrZipFileNotExist } id := zf.Base().ID @@ -138,15 +136,15 @@ func (i *fileFolderImporter) populateZipFileID(ctx context.Context, f *file.DirE return nil } -func (i *fileFolderImporter) PostImport(ctx context.Context, id int) error { +func (i *Importer) PostImport(ctx context.Context, id int) error { return nil } -func (i *fileFolderImporter) Name() string { +func (i *Importer) Name() string { return i.Input.DirEntry().Path } -func (i *fileFolderImporter) FindExistingID(ctx context.Context) (*int, error) { +func (i *Importer) FindExistingID(ctx context.Context) (*int, error) { path := i.Input.DirEntry().Path existing, err := i.ReaderWriter.FindByPath(ctx, path) if err != nil { @@ -161,7 +159,7 @@ func (i *fileFolderImporter) FindExistingID(ctx context.Context) (*int, error) { return nil, nil } -func (i *fileFolderImporter) createFolderHierarchy(ctx context.Context, p string) (*file.Folder, error) { +func (i *Importer) createFolderHierarchy(ctx context.Context, p string) (*models.Folder, error) { parentPath := filepath.Dir(p) if parentPath == p { @@ -177,7 +175,7 @@ func (i *fileFolderImporter) createFolderHierarchy(ctx context.Context, p string return i.getOrCreateFolder(ctx, p, parent) } -func (i *fileFolderImporter) getOrCreateFolder(ctx context.Context, path string, parent *file.Folder) (*file.Folder, error) { +func (i *Importer) getOrCreateFolder(ctx context.Context, path string, parent *models.Folder) (*models.Folder, error) { folder, err := i.FolderStore.FindByPath(ctx, path) if err != nil { return nil, err @@ -189,7 +187,7 @@ func (i 
*fileFolderImporter) getOrCreateFolder(ctx context.Context, path string, now := time.Now() - folder = &file.Folder{ + folder = &models.Folder{ Path: path, CreatedAt: now, UpdatedAt: now, @@ -207,7 +205,7 @@ func (i *fileFolderImporter) getOrCreateFolder(ctx context.Context, path string, return folder, nil } -func (i *fileFolderImporter) Create(ctx context.Context) (*int, error) { +func (i *Importer) Create(ctx context.Context) (*int, error) { // create folder hierarchy and set parent folder id path := i.Input.DirEntry().Path path = filepath.Dir(path) @@ -223,7 +221,7 @@ func (i *fileFolderImporter) Create(ctx context.Context) (*int, error) { return i.createFile(ctx, folder) } -func (i *fileFolderImporter) createFile(ctx context.Context, parentFolder *file.Folder) (*int, error) { +func (i *Importer) createFile(ctx context.Context, parentFolder *models.Folder) (*int, error) { if parentFolder != nil { i.file.Base().ParentFolderID = parentFolder.ID } @@ -236,7 +234,7 @@ func (i *fileFolderImporter) createFile(ctx context.Context, parentFolder *file. 
return &id, nil } -func (i *fileFolderImporter) createFolder(ctx context.Context, parentFolder *file.Folder) (*int, error) { +func (i *Importer) createFolder(ctx context.Context, parentFolder *models.Folder) (*int, error) { if parentFolder != nil { i.folder.ParentFolderID = &parentFolder.ID } @@ -249,7 +247,7 @@ func (i *fileFolderImporter) createFolder(ctx context.Context, parentFolder *fil return &id, nil } -func (i *fileFolderImporter) Update(ctx context.Context, id int) error { +func (i *Importer) Update(ctx context.Context, id int) error { // update not supported return nil } diff --git a/pkg/file/move.go b/pkg/file/move.go index 3b3c66ec50d..64a83fed645 100644 --- a/pkg/file/move.go +++ b/pkg/file/move.go @@ -11,6 +11,7 @@ import ( "time" "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/txn" ) @@ -40,14 +41,14 @@ func (r folderCreatorStatRenamerImpl) Mkdir(name string, perm os.FileMode) error type Mover struct { Renamer DirMakerStatRenamer - Files GetterUpdater - Folders FolderStore + Files models.FileFinderUpdater + Folders models.FolderReaderWriter moved map[string]string foldersCreated []string } -func NewMover(fileStore GetterUpdater, folderStore FolderStore) *Mover { +func NewMover(fileStore models.FileFinderUpdater, folderStore models.FolderReaderWriter) *Mover { return &Mover{ Files: fileStore, Folders: folderStore, @@ -60,7 +61,7 @@ func NewMover(fileStore GetterUpdater, folderStore FolderStore) *Mover { // Move moves the file to the given folder and basename. If basename is empty, then the existing basename is used. // Assumes that the parent folder exists in the filesystem. 
-func (m *Mover) Move(ctx context.Context, f File, folder *Folder, basename string) error { +func (m *Mover) Move(ctx context.Context, f models.File, folder *models.Folder, basename string) error { fBase := f.Base() // don't allow moving files in zip files diff --git a/pkg/file/scan.go b/pkg/file/scan.go index badb5ab23e5..a0d301e60c2 100644 --- a/pkg/file/scan.go +++ b/pkg/file/scan.go @@ -13,6 +13,7 @@ import ( "github.com/remeh/sizedwaitgroup" "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/txn" "github.com/stashapp/stash/pkg/utils" ) @@ -24,15 +25,6 @@ const ( maxRetries = -1 ) -// Repository provides access to storage methods for files and folders. -type Repository struct { - txn.Manager - txn.DatabaseProvider - Store - - FolderStore FolderStore -} - // Scanner scans files into the database. // // The scan process works using two goroutines. The first walks through the provided paths @@ -59,7 +51,7 @@ type Repository struct { // If the file is not a renamed file, then the decorators are fired and the file is created, then // the applicable handlers are fired. type Scanner struct { - FS FS + FS models.FS Repository Repository FingerprintCalculator FingerprintCalculator @@ -67,6 +59,38 @@ type Scanner struct { FileDecorators []Decorator } +// FingerprintCalculator calculates a fingerprint for the provided file. +type FingerprintCalculator interface { + CalculateFingerprints(f *models.BaseFile, o Opener, useExisting bool) ([]models.Fingerprint, error) +} + +// Decorator wraps the Decorate method to add additional functionality while scanning files. +type Decorator interface { + Decorate(ctx context.Context, fs models.FS, f models.File) (models.File, error) + IsMissingMetadata(ctx context.Context, fs models.FS, f models.File) bool +} + +type FilteredDecorator struct { + Decorator + Filter +} + +// Decorate runs the decorator if the filter accepts the file. 
+func (d *FilteredDecorator) Decorate(ctx context.Context, fs models.FS, f models.File) (models.File, error) { + if d.Accept(ctx, f) { + return d.Decorator.Decorate(ctx, fs, f) + } + return f, nil +} + +func (d *FilteredDecorator) IsMissingMetadata(ctx context.Context, fs models.FS, f models.File) bool { + if d.Accept(ctx, f) { + return d.Decorator.IsMissingMetadata(ctx, fs, f) + } + + return false +} + // ProgressReporter is used to report progress of the scan. type ProgressReporter interface { AddTotal(total int) @@ -129,8 +153,8 @@ func (s *Scanner) Scan(ctx context.Context, handlers []Handler, options ScanOpti } type scanFile struct { - *BaseFile - fs FS + *models.BaseFile + fs models.FS info fs.FileInfo } @@ -198,7 +222,7 @@ func (s *scanJob) queueFiles(ctx context.Context, paths []string) error { return err } -func (s *scanJob) queueFileFunc(ctx context.Context, f FS, zipFile *scanFile) fs.WalkDirFunc { +func (s *scanJob) queueFileFunc(ctx context.Context, f models.FS, zipFile *scanFile) fs.WalkDirFunc { return func(path string, d fs.DirEntry, err error) error { if err != nil { // don't let errors prevent scanning @@ -229,8 +253,8 @@ func (s *scanJob) queueFileFunc(ctx context.Context, f FS, zipFile *scanFile) fs } ff := scanFile{ - BaseFile: &BaseFile{ - DirEntry: DirEntry{ + BaseFile: &models.BaseFile{ + DirEntry: models.DirEntry{ ModTime: modTime(info), }, Path: path, @@ -286,7 +310,7 @@ func (s *scanJob) queueFileFunc(ctx context.Context, f FS, zipFile *scanFile) fs } } -func getFileSize(f FS, path string, info fs.FileInfo) (int64, error) { +func getFileSize(f models.FS, path string, info fs.FileInfo) (int64, error) { // #2196/#3042 - replace size with target size if file is a symlink if info.Mode()&os.ModeSymlink == os.ModeSymlink { targetInfo, err := f.Stat(path) @@ -408,10 +432,10 @@ func (s *scanJob) processQueueItem(ctx context.Context, f scanFile) { }) } -func (s *scanJob) getFolderID(ctx context.Context, path string) (*FolderID, error) { +func (s 
*scanJob) getFolderID(ctx context.Context, path string) (*models.FolderID, error) { // check the folder cache first if f, ok := s.folderPathToID.Load(path); ok { - v := f.(FolderID) + v := f.(models.FolderID) return &v, nil } @@ -428,7 +452,7 @@ func (s *scanJob) getFolderID(ctx context.Context, path string) (*FolderID, erro return &ret.ID, nil } -func (s *scanJob) getZipFileID(ctx context.Context, zipFile *scanFile) (*ID, error) { +func (s *scanJob) getZipFileID(ctx context.Context, zipFile *scanFile) (*models.FileID, error) { if zipFile == nil { return nil, nil } @@ -441,11 +465,11 @@ func (s *scanJob) getZipFileID(ctx context.Context, zipFile *scanFile) (*ID, err // check the folder cache first if f, ok := s.zipPathToID.Load(path); ok { - v := f.(ID) + v := f.(models.FileID) return &v, nil } - ret, err := s.Repository.FindByPath(ctx, path) + ret, err := s.Repository.FileStore.FindByPath(ctx, path) if err != nil { return nil, fmt.Errorf("getting zip file ID for %q: %w", path, err) } @@ -489,7 +513,7 @@ func (s *scanJob) handleFolder(ctx context.Context, file scanFile) error { }) } -func (s *scanJob) onNewFolder(ctx context.Context, file scanFile) (*Folder, error) { +func (s *scanJob) onNewFolder(ctx context.Context, file scanFile) (*models.Folder, error) { renamed, err := s.handleFolderRename(ctx, file) if err != nil { return nil, err @@ -501,7 +525,7 @@ func (s *scanJob) onNewFolder(ctx context.Context, file scanFile) (*Folder, erro now := time.Now() - toCreate := &Folder{ + toCreate := &models.Folder{ DirEntry: file.DirEntry, Path: file.Path, CreatedAt: now, @@ -536,7 +560,7 @@ func (s *scanJob) onNewFolder(ctx context.Context, file scanFile) (*Folder, erro return toCreate, nil } -func (s *scanJob) handleFolderRename(ctx context.Context, file scanFile) (*Folder, error) { +func (s *scanJob) handleFolderRename(ctx context.Context, file scanFile) (*models.Folder, error) { // ignore folders in zip files if file.ZipFileID != nil { return nil, nil @@ -572,7 +596,7 @@ 
func (s *scanJob) handleFolderRename(ctx context.Context, file scanFile) (*Folde return renamedFrom, nil } -func (s *scanJob) onExistingFolder(ctx context.Context, f scanFile, existing *Folder) (*Folder, error) { +func (s *scanJob) onExistingFolder(ctx context.Context, f scanFile, existing *models.Folder) (*models.Folder, error) { update := false // update if mod time is changed @@ -613,12 +637,12 @@ func modTime(info fs.FileInfo) time.Time { func (s *scanJob) handleFile(ctx context.Context, f scanFile) error { defer s.incrementProgress(f) - var ff File + var ff models.File // don't use a transaction to check if new or existing if err := s.withDB(ctx, func(ctx context.Context) error { // determine if file already exists in data store var err error - ff, err = s.Repository.FindByPath(ctx, f.Path) + ff, err = s.Repository.FileStore.FindByPath(ctx, f.Path) if err != nil { return fmt.Errorf("checking for existing file %q: %w", f.Path, err) } @@ -661,7 +685,7 @@ func (s *scanJob) isZipFile(path string) bool { return false } -func (s *scanJob) onNewFile(ctx context.Context, f scanFile) (File, error) { +func (s *scanJob) onNewFile(ctx context.Context, f scanFile) (models.File, error) { now := time.Now() baseFile := f.BaseFile @@ -716,7 +740,7 @@ func (s *scanJob) onNewFile(ctx context.Context, f scanFile) (File, error) { // if not renamed, queue file for creation if err := s.withTxn(ctx, func(ctx context.Context) error { - if err := s.Repository.Create(ctx, file); err != nil { + if err := s.Repository.FileStore.Create(ctx, file); err != nil { return fmt.Errorf("creating file %q: %w", path, err) } @@ -732,7 +756,7 @@ func (s *scanJob) onNewFile(ctx context.Context, f scanFile) (File, error) { return file, nil } -func (s *scanJob) fireDecorators(ctx context.Context, fs FS, f File) (File, error) { +func (s *scanJob) fireDecorators(ctx context.Context, fs models.FS, f models.File) (models.File, error) { for _, h := range s.FileDecorators { var err error f, err = 
h.Decorate(ctx, fs, f) @@ -744,7 +768,7 @@ func (s *scanJob) fireDecorators(ctx context.Context, fs FS, f File) (File, erro return f, nil } -func (s *scanJob) fireHandlers(ctx context.Context, f File, oldFile File) error { +func (s *scanJob) fireHandlers(ctx context.Context, f models.File, oldFile models.File) error { for _, h := range s.handlers { if err := h.Handle(ctx, f, oldFile); err != nil { return err @@ -754,7 +778,7 @@ func (s *scanJob) fireHandlers(ctx context.Context, f File, oldFile File) error return nil } -func (s *scanJob) calculateFingerprints(fs FS, f *BaseFile, path string, useExisting bool) (Fingerprints, error) { +func (s *scanJob) calculateFingerprints(fs models.FS, f *models.BaseFile, path string, useExisting bool) (models.Fingerprints, error) { // only log if we're (re)calculating fingerprints if !useExisting { logger.Infof("Calculating fingerprints for %s ...", path) @@ -772,7 +796,7 @@ func (s *scanJob) calculateFingerprints(fs FS, f *BaseFile, path string, useExis return fp, nil } -func appendFileUnique(v []File, toAdd []File) []File { +func appendFileUnique(v []models.File, toAdd []models.File) []models.File { for _, f := range toAdd { found := false id := f.Base().ID @@ -791,7 +815,7 @@ func appendFileUnique(v []File, toAdd []File) []File { return v } -func (s *scanJob) getFileFS(f *BaseFile) (FS, error) { +func (s *scanJob) getFileFS(f *models.BaseFile) (models.FS, error) { if f.ZipFile == nil { return s.FS, nil } @@ -805,11 +829,11 @@ func (s *scanJob) getFileFS(f *BaseFile) (FS, error) { return fs.OpenZip(zipPath) } -func (s *scanJob) handleRename(ctx context.Context, f File, fp []Fingerprint) (File, error) { - var others []File +func (s *scanJob) handleRename(ctx context.Context, f models.File, fp []models.Fingerprint) (models.File, error) { + var others []models.File for _, tfp := range fp { - thisOthers, err := s.Repository.FindByFingerprint(ctx, tfp) + thisOthers, err := s.Repository.FileStore.FindByFingerprint(ctx, tfp) if err != 
nil { return nil, fmt.Errorf("getting files by fingerprint %v: %w", tfp, err) } @@ -817,7 +841,7 @@ func (s *scanJob) handleRename(ctx context.Context, f File, fp []Fingerprint) (F others = appendFileUnique(others, thisOthers) } - var missing []File + var missing []models.File fZipID := f.Base().ZipFileID for _, other := range others { @@ -867,7 +891,7 @@ func (s *scanJob) handleRename(ctx context.Context, f File, fp []Fingerprint) (F fBase.Fingerprints = otherBase.Fingerprints if err := s.withTxn(ctx, func(ctx context.Context) error { - if err := s.Repository.Update(ctx, f); err != nil { + if err := s.Repository.FileStore.Update(ctx, f); err != nil { return fmt.Errorf("updating file for rename %q: %w", fBase.Path, err) } @@ -889,7 +913,7 @@ func (s *scanJob) handleRename(ctx context.Context, f File, fp []Fingerprint) (F return f, nil } -func (s *scanJob) isHandlerRequired(ctx context.Context, f File) bool { +func (s *scanJob) isHandlerRequired(ctx context.Context, f models.File) bool { accept := len(s.options.HandlerRequiredFilters) == 0 for _, filter := range s.options.HandlerRequiredFilters { // accept if any filter accepts the file @@ -910,7 +934,7 @@ func (s *scanJob) isHandlerRequired(ctx context.Context, f File) bool { // - file size // - image format, width or height // - video codec, audio codec, format, width, height, framerate or bitrate -func (s *scanJob) isMissingMetadata(ctx context.Context, f scanFile, existing File) bool { +func (s *scanJob) isMissingMetadata(ctx context.Context, f scanFile, existing models.File) bool { for _, h := range s.FileDecorators { if h.IsMissingMetadata(ctx, f.fs, existing) { return true @@ -920,7 +944,7 @@ func (s *scanJob) isMissingMetadata(ctx context.Context, f scanFile, existing Fi return false } -func (s *scanJob) setMissingMetadata(ctx context.Context, f scanFile, existing File) (File, error) { +func (s *scanJob) setMissingMetadata(ctx context.Context, f scanFile, existing models.File) (models.File, error) { path := 
existing.Base().Path logger.Infof("Updating metadata for %s", path) @@ -934,7 +958,7 @@ func (s *scanJob) setMissingMetadata(ctx context.Context, f scanFile, existing F // queue file for update if err := s.withTxn(ctx, func(ctx context.Context) error { - if err := s.Repository.Update(ctx, existing); err != nil { + if err := s.Repository.FileStore.Update(ctx, existing); err != nil { return fmt.Errorf("updating file %q: %w", path, err) } @@ -946,7 +970,7 @@ func (s *scanJob) setMissingMetadata(ctx context.Context, f scanFile, existing F return existing, nil } -func (s *scanJob) setMissingFingerprints(ctx context.Context, f scanFile, existing File) (File, error) { +func (s *scanJob) setMissingFingerprints(ctx context.Context, f scanFile, existing models.File) (models.File, error) { const useExisting = true fp, err := s.calculateFingerprints(f.fs, existing.Base(), f.Path, useExisting) if err != nil { @@ -957,7 +981,7 @@ func (s *scanJob) setMissingFingerprints(ctx context.Context, f scanFile, existi existing.SetFingerprints(fp) if err := s.withTxn(ctx, func(ctx context.Context) error { - if err := s.Repository.Update(ctx, existing); err != nil { + if err := s.Repository.FileStore.Update(ctx, existing); err != nil { return fmt.Errorf("updating file %q: %w", f.Path, err) } @@ -971,7 +995,7 @@ func (s *scanJob) setMissingFingerprints(ctx context.Context, f scanFile, existi } // returns a file only if it was updated -func (s *scanJob) onExistingFile(ctx context.Context, f scanFile, existing File) (File, error) { +func (s *scanJob) onExistingFile(ctx context.Context, f scanFile, existing models.File) (models.File, error) { base := existing.Base() path := base.Path @@ -1006,7 +1030,7 @@ func (s *scanJob) onExistingFile(ctx context.Context, f scanFile, existing File) // queue file for update if err := s.withTxn(ctx, func(ctx context.Context) error { - if err := s.Repository.Update(ctx, existing); err != nil { + if err := s.Repository.FileStore.Update(ctx, existing); err != 
nil { return fmt.Errorf("updating file %q: %w", path, err) } @@ -1022,21 +1046,21 @@ func (s *scanJob) onExistingFile(ctx context.Context, f scanFile, existing File) return existing, nil } -func (s *scanJob) removeOutdatedFingerprints(existing File, fp Fingerprints) { +func (s *scanJob) removeOutdatedFingerprints(existing models.File, fp models.Fingerprints) { // HACK - if no MD5 fingerprint was returned, and the oshash is changed // then remove the MD5 fingerprint - oshash := fp.For(FingerprintTypeOshash) + oshash := fp.For(models.FingerprintTypeOshash) if oshash == nil { return } - existingOshash := existing.Base().Fingerprints.For(FingerprintTypeOshash) + existingOshash := existing.Base().Fingerprints.For(models.FingerprintTypeOshash) if existingOshash == nil || *existingOshash == *oshash { // missing oshash or same oshash - nothing to do return } - md5 := fp.For(FingerprintTypeMD5) + md5 := fp.For(models.FingerprintTypeMD5) if md5 != nil { // nothing to do @@ -1045,11 +1069,11 @@ func (s *scanJob) removeOutdatedFingerprints(existing File, fp Fingerprints) { // oshash has changed, MD5 is missing - remove MD5 from the existing fingerprints logger.Infof("Removing outdated checksum from %s", existing.Base().Path) - existing.Base().Fingerprints.Remove(FingerprintTypeMD5) + existing.Base().Fingerprints.Remove(models.FingerprintTypeMD5) } // returns a file only if it was updated -func (s *scanJob) onUnchangedFile(ctx context.Context, f scanFile, existing File) (File, error) { +func (s *scanJob) onUnchangedFile(ctx context.Context, f scanFile, existing models.File) (models.File, error) { var err error isMissingMetdata := s.isMissingMetadata(ctx, f, existing) diff --git a/pkg/file/video/caption.go b/pkg/file/video/caption.go index d2f8e79a5b5..bec3db6fd64 100644 --- a/pkg/file/video/caption.go +++ b/pkg/file/video/caption.go @@ -9,7 +9,6 @@ import ( "strings" "github.com/asticode/go-astisub" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/logger" 
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/txn" @@ -87,12 +86,12 @@ func getCaptionsLangFromPath(captionPath string) string { } type CaptionUpdater interface { - GetCaptions(ctx context.Context, fileID file.ID) ([]*models.VideoCaption, error) - UpdateCaptions(ctx context.Context, fileID file.ID, captions []*models.VideoCaption) error + GetCaptions(ctx context.Context, fileID models.FileID) ([]*models.VideoCaption, error) + UpdateCaptions(ctx context.Context, fileID models.FileID, captions []*models.VideoCaption) error } // associates captions to scene/s with the same basename -func AssociateCaptions(ctx context.Context, captionPath string, txnMgr txn.Manager, fqb file.Getter, w CaptionUpdater) { +func AssociateCaptions(ctx context.Context, captionPath string, txnMgr txn.Manager, fqb models.FileFinder, w CaptionUpdater) { captionLang := getCaptionsLangFromPath(captionPath) captionPrefix := getCaptionPrefix(captionPath) @@ -108,7 +107,7 @@ func AssociateCaptions(ctx context.Context, captionPath string, txnMgr txn.Manag // found some files // filter out non video files switch f.(type) { - case *file.VideoFile: + case *models.VideoFile: break default: continue @@ -143,7 +142,7 @@ func AssociateCaptions(ctx context.Context, captionPath string, txnMgr txn.Manag } // CleanCaptions removes non existent/accessible language codes from captions -func CleanCaptions(ctx context.Context, f *file.VideoFile, txnMgr txn.Manager, w CaptionUpdater) error { +func CleanCaptions(ctx context.Context, f *models.VideoFile, txnMgr txn.Manager, w CaptionUpdater) error { captions, err := w.GetCaptions(ctx, f.ID) if err != nil { return fmt.Errorf("getting captions for file %s: %w", f.Path, err) diff --git a/pkg/file/video/scan.go b/pkg/file/video/scan.go index 1f3d7817f35..ca7d0be963a 100644 --- a/pkg/file/video/scan.go +++ b/pkg/file/video/scan.go @@ -7,6 +7,7 @@ import ( "github.com/stashapp/stash/pkg/ffmpeg" "github.com/stashapp/stash/pkg/file" + 
"github.com/stashapp/stash/pkg/models" ) // Decorator adds video specific fields to a File. @@ -14,7 +15,7 @@ type Decorator struct { FFProbe ffmpeg.FFProbe } -func (d *Decorator) Decorate(ctx context.Context, fs file.FS, f file.File) (file.File, error) { +func (d *Decorator) Decorate(ctx context.Context, fs models.FS, f models.File) (models.File, error) { if d.FFProbe == "" { return f, errors.New("ffprobe not configured") } @@ -42,7 +43,7 @@ func (d *Decorator) Decorate(ctx context.Context, fs file.FS, f file.File) (file interactive = true } - return &file.VideoFile{ + return &models.VideoFile{ BaseFile: base, Format: string(container), VideoCodec: videoFile.VideoCodec, @@ -56,13 +57,13 @@ func (d *Decorator) Decorate(ctx context.Context, fs file.FS, f file.File) (file }, nil } -func (d *Decorator) IsMissingMetadata(ctx context.Context, fs file.FS, f file.File) bool { +func (d *Decorator) IsMissingMetadata(ctx context.Context, fs models.FS, f models.File) bool { const ( unsetString = "unset" unsetNumber = -1 ) - vf, ok := f.(*file.VideoFile) + vf, ok := f.(*models.VideoFile) if !ok { return true } diff --git a/pkg/file/video_file.go b/pkg/file/video_file.go deleted file mode 100644 index 382c81e199c..00000000000 --- a/pkg/file/video_file.go +++ /dev/null @@ -1,29 +0,0 @@ -package file - -// VideoFile is an extension of BaseFile to represent video files. 
-type VideoFile struct { - *BaseFile - Format string `json:"format"` - Width int `json:"width"` - Height int `json:"height"` - Duration float64 `json:"duration"` - VideoCodec string `json:"video_codec"` - AudioCodec string `json:"audio_codec"` - FrameRate float64 `json:"frame_rate"` - BitRate int64 `json:"bitrate"` - - Interactive bool `json:"interactive"` - InteractiveSpeed *int `json:"interactive_speed"` -} - -func (f VideoFile) GetWidth() int { - return f.Width -} - -func (f VideoFile) GetHeight() int { - return f.Height -} - -func (f VideoFile) GetFormat() string { - return f.Format -} diff --git a/pkg/file/walk.go b/pkg/file/walk.go index a73781d4548..3c6a157b758 100644 --- a/pkg/file/walk.go +++ b/pkg/file/walk.go @@ -6,6 +6,8 @@ import ( "os" "path/filepath" "sort" + + "github.com/stashapp/stash/pkg/models" ) // Modified from github.com/facebookgo/symwalk @@ -48,7 +50,7 @@ import ( // // Note that symwalk.Walk does not terminate if there are any non-terminating loops in // the file structure. 
-func walkSym(f FS, filename string, linkDirname string, walkFn fs.WalkDirFunc) error { +func walkSym(f models.FS, filename string, linkDirname string, walkFn fs.WalkDirFunc) error { symWalkFunc := func(path string, info fs.DirEntry, err error) error { if fname, err := filepath.Rel(filename, path); err == nil { @@ -80,7 +82,7 @@ func walkSym(f FS, filename string, linkDirname string, walkFn fs.WalkDirFunc) e } // symWalk extends filepath.Walk to also follow symlinks -func symWalk(fs FS, path string, walkFn fs.WalkDirFunc) error { +func symWalk(fs models.FS, path string, walkFn fs.WalkDirFunc) error { return walkSym(fs, path, path, walkFn) } @@ -93,7 +95,7 @@ func (d *statDirEntry) IsDir() bool { return d.info.IsDir() } func (d *statDirEntry) Type() fs.FileMode { return d.info.Mode().Type() } func (d *statDirEntry) Info() (fs.FileInfo, error) { return d.info, nil } -func fsWalk(f FS, root string, fn fs.WalkDirFunc) error { +func fsWalk(f models.FS, root string, fn fs.WalkDirFunc) error { info, err := f.Lstat(root) if err != nil { err = fn(root, nil, err) @@ -106,7 +108,7 @@ func fsWalk(f FS, root string, fn fs.WalkDirFunc) error { return err } -func walkDir(f FS, path string, d fs.DirEntry, walkDirFn fs.WalkDirFunc) error { +func walkDir(f models.FS, path string, d fs.DirEntry, walkDirFn fs.WalkDirFunc) error { if err := walkDirFn(path, d, nil); err != nil || !d.IsDir() { if errors.Is(err, fs.SkipDir) && d.IsDir() { // Successfully skipped directory. @@ -143,7 +145,7 @@ func walkDir(f FS, path string, d fs.DirEntry, walkDirFn fs.WalkDirFunc) error { // readDir reads the directory named by dirname and returns // a sorted list of directory entries. 
-func readDir(fs FS, dirname string) ([]fs.DirEntry, error) { +func readDir(fs models.FS, dirname string) ([]fs.DirEntry, error) { f, err := fs.Open(dirname) if err != nil { return nil, err diff --git a/pkg/file/zip.go b/pkg/file/zip.go index 5cef1184ef1..a17b596852f 100644 --- a/pkg/file/zip.go +++ b/pkg/file/zip.go @@ -10,6 +10,7 @@ import ( "path/filepath" "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" "github.com/xWTF/chardet" "golang.org/x/net/html/charset" @@ -22,14 +23,14 @@ var ( ) // ZipFS is a file system backed by a zip file. -type ZipFS struct { +type zipFS struct { *zip.Reader zipFileCloser io.Closer zipInfo fs.FileInfo zipPath string } -func newZipFS(fs FS, path string, info fs.FileInfo) (*ZipFS, error) { +func newZipFS(fs models.FS, path string, info fs.FileInfo) (*zipFS, error) { reader, err := fs.Open(path) if err != nil { return nil, err @@ -85,7 +86,7 @@ func newZipFS(fs FS, path string, info fs.FileInfo) (*ZipFS, error) { } } - return &ZipFS{ + return &zipFS{ Reader: zipReader, zipFileCloser: reader, zipInfo: info, @@ -93,7 +94,7 @@ func newZipFS(fs FS, path string, info fs.FileInfo) (*ZipFS, error) { }, nil } -func (f *ZipFS) rel(name string) (string, error) { +func (f *zipFS) rel(name string) (string, error) { if f.zipPath == name { return ".", nil } @@ -110,7 +111,7 @@ func (f *ZipFS) rel(name string) (string, error) { return relName, nil } -func (f *ZipFS) Stat(name string) (fs.FileInfo, error) { +func (f *zipFS) Stat(name string) (fs.FileInfo, error) { reader, err := f.Open(name) if err != nil { return nil, err @@ -120,15 +121,15 @@ func (f *ZipFS) Stat(name string) (fs.FileInfo, error) { return reader.Stat() } -func (f *ZipFS) Lstat(name string) (fs.FileInfo, error) { +func (f *zipFS) Lstat(name string) (fs.FileInfo, error) { return f.Stat(name) } -func (f *ZipFS) OpenZip(name string) (*ZipFS, error) { +func (f *zipFS) OpenZip(name string) (models.ZipFS, error) { return nil, errZipFSOpenZip } -func (f 
*ZipFS) IsPathCaseSensitive(path string) (bool, error) { +func (f *zipFS) IsPathCaseSensitive(path string) (bool, error) { return true, nil } @@ -145,7 +146,7 @@ func (f *zipReadDirFile) ReadDir(n int) ([]fs.DirEntry, error) { return asReadDirFile.ReadDir(n) } -func (f *ZipFS) Open(name string) (fs.ReadDirFile, error) { +func (f *zipFS) Open(name string) (fs.ReadDirFile, error) { relName, err := f.rel(name) if err != nil { return nil, err @@ -161,12 +162,12 @@ func (f *ZipFS) Open(name string) (fs.ReadDirFile, error) { }, nil } -func (f *ZipFS) Close() error { +func (f *zipFS) Close() error { return f.zipFileCloser.Close() } // openOnly returns a ReadCloser where calling Close will close the zip fs as well. -func (f *ZipFS) OpenOnly(name string) (io.ReadCloser, error) { +func (f *zipFS) OpenOnly(name string) (io.ReadCloser, error) { r, err := f.Open(name) if err != nil { return nil, err diff --git a/pkg/gallery/chapter_import.go b/pkg/gallery/chapter_import.go index 91abe909de0..ee223b1aa9f 100644 --- a/pkg/gallery/chapter_import.go +++ b/pkg/gallery/chapter_import.go @@ -8,15 +8,14 @@ import ( "github.com/stashapp/stash/pkg/models/jsonschema" ) -type ChapterCreatorUpdater interface { - Create(ctx context.Context, newGalleryChapter *models.GalleryChapter) error - Update(ctx context.Context, updatedGalleryChapter *models.GalleryChapter) error +type ChapterImporterReaderWriter interface { + models.GalleryChapterCreatorUpdater FindByGalleryID(ctx context.Context, galleryID int) ([]*models.GalleryChapter, error) } type ChapterImporter struct { GalleryID int - ReaderWriter ChapterCreatorUpdater + ReaderWriter ChapterImporterReaderWriter Input jsonschema.GalleryChapter MissingRefBehaviour models.ImportMissingRefEnum diff --git a/pkg/gallery/delete.go b/pkg/gallery/delete.go index 60aee0d2853..5609b2f4bac 100644 --- a/pkg/gallery/delete.go +++ b/pkg/gallery/delete.go @@ -41,12 +41,7 @@ func (s *Service) Destroy(ctx context.Context, i *models.Gallery, fileDeleter *i return 
imgsDestroyed, nil } -type ChapterDestroyer interface { - FindByGalleryID(ctx context.Context, galleryID int) ([]*models.GalleryChapter, error) - Destroy(ctx context.Context, id int) error -} - -func DestroyChapter(ctx context.Context, galleryChapter *models.GalleryChapter, qb ChapterDestroyer) error { +func DestroyChapter(ctx context.Context, galleryChapter *models.GalleryChapter, qb models.GalleryChapterDestroyer) error { return qb.Destroy(ctx, galleryChapter.ID) } diff --git a/pkg/gallery/export.go b/pkg/gallery/export.go index d53a2a8e585..83f3c31cebc 100644 --- a/pkg/gallery/export.go +++ b/pkg/gallery/export.go @@ -7,13 +7,8 @@ import ( "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/json" "github.com/stashapp/stash/pkg/models/jsonschema" - "github.com/stashapp/stash/pkg/studio" ) -type ChapterFinder interface { - FindByGalleryID(ctx context.Context, galleryID int) ([]*models.GalleryChapter, error) -} - // ToBasicJSON converts a gallery object into its JSON object equivalent. It // does not convert the relationships to other objects. func ToBasicJSON(gallery *models.Gallery) (*jsonschema.Gallery, error) { @@ -48,7 +43,7 @@ func ToBasicJSON(gallery *models.Gallery) (*jsonschema.Gallery, error) { // GetStudioName returns the name of the provided gallery's studio. It returns an // empty string if there is no studio assigned to the gallery. -func GetStudioName(ctx context.Context, reader studio.Finder, gallery *models.Gallery) (string, error) { +func GetStudioName(ctx context.Context, reader models.StudioGetter, gallery *models.Gallery) (string, error) { if gallery.StudioID != nil { studio, err := reader.Find(ctx, *gallery.StudioID) if err != nil { @@ -65,7 +60,7 @@ func GetStudioName(ctx context.Context, reader studio.Finder, gallery *models.Ga // GetGalleryChaptersJSON returns a slice of GalleryChapter JSON representation // objects corresponding to the provided gallery's chapters. 
-func GetGalleryChaptersJSON(ctx context.Context, chapterReader ChapterFinder, gallery *models.Gallery) ([]jsonschema.GalleryChapter, error) { +func GetGalleryChaptersJSON(ctx context.Context, chapterReader models.GalleryChapterFinder, gallery *models.Gallery) ([]jsonschema.GalleryChapter, error) { galleryChapters, err := chapterReader.FindByGalleryID(ctx, gallery.ID) if err != nil { return nil, fmt.Errorf("error getting gallery chapters: %v", err) diff --git a/pkg/gallery/export_test.go b/pkg/gallery/export_test.go index fcd90b9e98c..3a6ffa2ec55 100644 --- a/pkg/gallery/export_test.go +++ b/pkg/gallery/export_test.go @@ -3,7 +3,6 @@ package gallery import ( "errors" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/json" "github.com/stashapp/stash/pkg/models/jsonschema" @@ -50,8 +49,8 @@ var ( func createFullGallery(id int) models.Gallery { return models.Gallery{ ID: id, - Files: models.NewRelatedFiles([]file.File{ - &file.BaseFile{ + Files: models.NewRelatedFiles([]models.File{ + &models.BaseFile{ Path: path, }, }), @@ -69,8 +68,8 @@ func createFullGallery(id int) models.Gallery { func createEmptyGallery(id int) models.Gallery { return models.Gallery{ ID: id, - Files: models.NewRelatedFiles([]file.File{ - &file.BaseFile{ + Files: models.NewRelatedFiles([]models.File{ + &models.BaseFile{ Path: path, }, }), diff --git a/pkg/gallery/import.go b/pkg/gallery/import.go index ccb258eb0a1..57d151245b5 100644 --- a/pkg/gallery/import.go +++ b/pkg/gallery/import.go @@ -5,22 +5,25 @@ import ( "fmt" "strings" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/jsonschema" - "github.com/stashapp/stash/pkg/performer" "github.com/stashapp/stash/pkg/sliceutil/stringslice" - "github.com/stashapp/stash/pkg/studio" - "github.com/stashapp/stash/pkg/tag" ) +type ImporterReaderWriter interface { + models.GalleryCreatorUpdater + FindByFileID(ctx 
context.Context, fileID models.FileID) ([]*models.Gallery, error) + FindByFolderID(ctx context.Context, folderID models.FolderID) ([]*models.Gallery, error) + FindUserGalleryByTitle(ctx context.Context, title string) ([]*models.Gallery, error) +} + type Importer struct { - ReaderWriter FullCreatorUpdater - StudioWriter studio.NameFinderCreator - PerformerWriter performer.NameFinderCreator - TagWriter tag.NameFinderCreator - FileFinder file.Getter - FolderFinder file.FolderGetter + ReaderWriter ImporterReaderWriter + StudioWriter models.StudioFinderCreator + PerformerWriter models.PerformerFinderCreator + TagWriter models.TagFinderCreator + FileFinder models.FileFinder + FolderFinder models.FolderFinder Input jsonschema.Gallery MissingRefBehaviour models.ImportMissingRefEnum @@ -28,11 +31,6 @@ type Importer struct { gallery models.Gallery } -type FullCreatorUpdater interface { - FinderCreatorUpdater - Update(ctx context.Context, updatedGallery *models.Gallery) error -} - func (i *Importer) PreImport(ctx context.Context) error { i.gallery = i.galleryJSONToGallery(i.Input) @@ -251,7 +249,7 @@ func (i *Importer) createTags(ctx context.Context, names []string) ([]*models.Ta } func (i *Importer) populateFilesFolder(ctx context.Context) error { - files := make([]file.File, 0) + files := make([]models.File, 0) for _, ref := range i.Input.ZipFiles { path := ref @@ -340,7 +338,7 @@ func (i *Importer) FindExistingID(ctx context.Context) (*int, error) { } func (i *Importer) Create(ctx context.Context) (*int, error) { - var fileIDs []file.ID + var fileIDs []models.FileID for _, f := range i.gallery.Files.List() { fileIDs = append(fileIDs, f.Base().ID) } diff --git a/pkg/gallery/import_test.go b/pkg/gallery/import_test.go index bfbdefa9e42..0997b4a57e2 100644 --- a/pkg/gallery/import_test.go +++ b/pkg/gallery/import_test.go @@ -6,7 +6,6 @@ import ( "testing" "time" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" 
"github.com/stashapp/stash/pkg/models/json" "github.com/stashapp/stash/pkg/models/jsonschema" @@ -68,7 +67,7 @@ func TestImporterPreImport(t *testing.T) { Rating: &rating, Organized: organized, URL: url, - Files: models.NewRelatedFiles([]file.File{}), + Files: models.NewRelatedFiles([]models.File{}), TagIDs: models.NewRelatedIDs([]int{}), PerformerIDs: models.NewRelatedIDs([]int{}), CreatedAt: createdAt, diff --git a/pkg/gallery/query.go b/pkg/gallery/query.go index cc2a043d757..da0b5f0c101 100644 --- a/pkg/gallery/query.go +++ b/pkg/gallery/query.go @@ -4,27 +4,10 @@ import ( "context" "strconv" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" ) -type Queryer interface { - Query(ctx context.Context, galleryFilter *models.GalleryFilterType, findFilter *models.FindFilterType) ([]*models.Gallery, int, error) -} - -type CountQueryer interface { - QueryCount(ctx context.Context, galleryFilter *models.GalleryFilterType, findFilter *models.FindFilterType) (int, error) -} - -type Finder interface { - FindByPath(ctx context.Context, p string) ([]*models.Gallery, error) - FindUserGalleryByTitle(ctx context.Context, title string) ([]*models.Gallery, error) - FindByFolderID(ctx context.Context, folderID file.FolderID) ([]*models.Gallery, error) - FindByFileID(ctx context.Context, fileID file.ID) ([]*models.Gallery, error) - FindByFingerprints(ctx context.Context, fp []file.Fingerprint) ([]*models.Gallery, error) -} - -func CountByPerformerID(ctx context.Context, r CountQueryer, id int) (int, error) { +func CountByPerformerID(ctx context.Context, r models.GalleryQueryer, id int) (int, error) { filter := &models.GalleryFilterType{ Performers: &models.MultiCriterionInput{ Value: []string{strconv.Itoa(id)}, @@ -35,7 +18,7 @@ func CountByPerformerID(ctx context.Context, r CountQueryer, id int) (int, error return r.QueryCount(ctx, filter, nil) } -func CountByStudioID(ctx context.Context, r CountQueryer, id int, depth *int) (int, error) { +func 
CountByStudioID(ctx context.Context, r models.GalleryQueryer, id int, depth *int) (int, error) { filter := &models.GalleryFilterType{ Studios: &models.HierarchicalMultiCriterionInput{ Value: []string{strconv.Itoa(id)}, @@ -47,7 +30,7 @@ func CountByStudioID(ctx context.Context, r CountQueryer, id int, depth *int) (i return r.QueryCount(ctx, filter, nil) } -func CountByTagID(ctx context.Context, r CountQueryer, id int, depth *int) (int, error) { +func CountByTagID(ctx context.Context, r models.GalleryQueryer, id int, depth *int) (int, error) { filter := &models.GalleryFilterType{ Tags: &models.HierarchicalMultiCriterionInput{ Value: []string{strconv.Itoa(id)}, diff --git a/pkg/gallery/scan.go b/pkg/gallery/scan.go index 8a35890eea8..a8f52e89bb6 100644 --- a/pkg/gallery/scan.go +++ b/pkg/gallery/scan.go @@ -7,39 +7,40 @@ import ( "strings" "time" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/plugin" ) -type FinderCreatorUpdater interface { - Finder - Create(ctx context.Context, newGallery *models.Gallery, fileIDs []file.ID) error +type ScanCreatorUpdater interface { + FindByFileID(ctx context.Context, fileID models.FileID) ([]*models.Gallery, error) + FindByFingerprints(ctx context.Context, fp []models.Fingerprint) ([]*models.Gallery, error) + GetFiles(ctx context.Context, relatedID int) ([]models.File, error) + + Create(ctx context.Context, newGallery *models.Gallery, fileIDs []models.FileID) error UpdatePartial(ctx context.Context, id int, updatedGallery models.GalleryPartial) (*models.Gallery, error) - AddFileID(ctx context.Context, id int, fileID file.ID) error - models.FileLoader + AddFileID(ctx context.Context, id int, fileID models.FileID) error } -type SceneFinderUpdater interface { +type ScanSceneFinderUpdater interface { FindByPath(ctx context.Context, p string) ([]*models.Scene, error) Update(ctx context.Context, updatedScene *models.Scene) error 
AddGalleryIDs(ctx context.Context, sceneID int, galleryIDs []int) error } -type ImageFinderUpdater interface { - FindByZipFileID(ctx context.Context, zipFileID file.ID) ([]*models.Image, error) +type ScanImageFinderUpdater interface { + FindByZipFileID(ctx context.Context, zipFileID models.FileID) ([]*models.Image, error) UpdatePartial(ctx context.Context, id int, partial models.ImagePartial) (*models.Image, error) } type ScanHandler struct { - CreatorUpdater FullCreatorUpdater - SceneFinderUpdater SceneFinderUpdater - ImageFinderUpdater ImageFinderUpdater + CreatorUpdater ScanCreatorUpdater + SceneFinderUpdater ScanSceneFinderUpdater + ImageFinderUpdater ScanImageFinderUpdater PluginCache *plugin.Cache } -func (h *ScanHandler) Handle(ctx context.Context, f file.File, oldFile file.File) error { +func (h *ScanHandler) Handle(ctx context.Context, f models.File, oldFile models.File) error { baseFile := f.Base() // try to match the file to a gallery @@ -83,7 +84,7 @@ func (h *ScanHandler) Handle(ctx context.Context, f file.File, oldFile file.File logger.Infof("%s doesn't exist. Creating new gallery...", f.Base().Path) - if err := h.CreatorUpdater.Create(ctx, newGallery, []file.ID{baseFile.ID}); err != nil { + if err := h.CreatorUpdater.Create(ctx, newGallery, []models.FileID{baseFile.ID}); err != nil { return fmt.Errorf("creating new gallery: %w", err) } @@ -112,7 +113,7 @@ func (h *ScanHandler) Handle(ctx context.Context, f file.File, oldFile file.File return nil } -func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models.Gallery, f file.File, updateExisting bool) error { +func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models.Gallery, f models.File, updateExisting bool) error { for _, i := range existing { if err := i.LoadFiles(ctx, h.CreatorUpdater); err != nil { return err @@ -146,7 +147,7 @@ func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models. 
return nil } -func (h *ScanHandler) associateScene(ctx context.Context, existing []*models.Gallery, f file.File) error { +func (h *ScanHandler) associateScene(ctx context.Context, existing []*models.Gallery, f models.File) error { galleryIDs := make([]int, len(existing)) for i, g := range existing { galleryIDs[i] = g.ID diff --git a/pkg/gallery/service.go b/pkg/gallery/service.go index 7dfc3857f5d..6db604fc4d6 100644 --- a/pkg/gallery/service.go +++ b/pkg/gallery/service.go @@ -3,50 +3,25 @@ package gallery import ( "context" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/image" "github.com/stashapp/stash/pkg/models" ) -type FinderByFile interface { - FindByFileID(ctx context.Context, fileID file.ID) ([]*models.Gallery, error) -} - -type Repository interface { - models.GalleryFinder - FinderByFile - Destroy(ctx context.Context, id int) error - models.FileLoader - ImageUpdater - PartialUpdater -} - -type PartialUpdater interface { - UpdatePartial(ctx context.Context, id int, updatedGallery models.GalleryPartial) (*models.Gallery, error) -} - type ImageFinder interface { - FindByFolderID(ctx context.Context, folder file.FolderID) ([]*models.Image, error) - FindByZipFileID(ctx context.Context, zipFileID file.ID) ([]*models.Image, error) + FindByFolderID(ctx context.Context, folder models.FolderID) ([]*models.Image, error) + FindByZipFileID(ctx context.Context, zipFileID models.FileID) ([]*models.Image, error) models.GalleryIDLoader } type ImageService interface { Destroy(ctx context.Context, i *models.Image, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile bool) error - DestroyZipImages(ctx context.Context, zipFile file.File, fileDeleter *image.FileDeleter, deleteGenerated bool) ([]*models.Image, error) -} - -type ChapterRepository interface { - ChapterFinder - ChapterDestroyer - - Update(ctx context.Context, updatedObject models.GalleryChapter) (*models.GalleryChapter, error) + DestroyZipImages(ctx context.Context, zipFile 
models.File, fileDeleter *image.FileDeleter, deleteGenerated bool) ([]*models.Image, error) } type Service struct { - Repository Repository + Repository models.GalleryReaderWriter ImageFinder ImageFinder ImageService ImageService - File file.Store - Folder file.FolderStore + File models.FileReaderWriter + Folder models.FolderReaderWriter } diff --git a/pkg/gallery/update.go b/pkg/gallery/update.go index 72f479bea99..71d92c5409b 100644 --- a/pkg/gallery/update.go +++ b/pkg/gallery/update.go @@ -54,7 +54,7 @@ func (s *Service) RemoveImages(ctx context.Context, g *models.Gallery, toRemove return s.Updated(ctx, g.ID) } -func AddPerformer(ctx context.Context, qb PartialUpdater, o *models.Gallery, performerID int) error { +func AddPerformer(ctx context.Context, qb models.GalleryUpdater, o *models.Gallery, performerID int) error { _, err := qb.UpdatePartial(ctx, o.ID, models.GalleryPartial{ PerformerIDs: &models.UpdateIDs{ IDs: []int{performerID}, @@ -64,7 +64,7 @@ func AddPerformer(ctx context.Context, qb PartialUpdater, o *models.Gallery, per return err } -func AddTag(ctx context.Context, qb PartialUpdater, o *models.Gallery, tagID int) error { +func AddTag(ctx context.Context, qb models.GalleryUpdater, o *models.Gallery, tagID int) error { _, err := qb.UpdatePartial(ctx, o.ID, models.GalleryPartial{ TagIDs: &models.UpdateIDs{ IDs: []int{tagID}, diff --git a/pkg/hash/videophash/phash.go b/pkg/hash/videophash/phash.go index 0cbefc2ae41..6e586b17869 100644 --- a/pkg/hash/videophash/phash.go +++ b/pkg/hash/videophash/phash.go @@ -13,8 +13,8 @@ import ( "github.com/stashapp/stash/pkg/ffmpeg" "github.com/stashapp/stash/pkg/ffmpeg/transcoder" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" ) const ( @@ -23,7 +23,7 @@ const ( rows = 5 ) -func Generate(encoder *ffmpeg.FFMpeg, videoFile *file.VideoFile) (*uint64, error) { +func Generate(encoder *ffmpeg.FFMpeg, videoFile *models.VideoFile) (*uint64, error) { 
sprite, err := generateSprite(encoder, videoFile) if err != nil { return nil, err @@ -76,7 +76,7 @@ func combineImages(images []image.Image) image.Image { return montage } -func generateSprite(encoder *ffmpeg.FFMpeg, videoFile *file.VideoFile) (image.Image, error) { +func generateSprite(encoder *ffmpeg.FFMpeg, videoFile *models.VideoFile) (image.Image, error) { logger.Infof("[generator] generating phash sprite for %s", videoFile.Path) // Generate sprite image offset by 5% on each end to avoid intro/outros diff --git a/pkg/image/delete.go b/pkg/image/delete.go index 78ef4b09ab6..89f4c181153 100644 --- a/pkg/image/delete.go +++ b/pkg/image/delete.go @@ -10,10 +10,6 @@ import ( "github.com/stashapp/stash/pkg/models/paths" ) -type Destroyer interface { - Destroy(ctx context.Context, id int) error -} - // FileDeleter is an extension of file.Deleter that handles deletion of image files. type FileDeleter struct { *file.Deleter @@ -45,7 +41,7 @@ func (s *Service) Destroy(ctx context.Context, i *models.Image, fileDeleter *Fil // DestroyZipImages destroys all images in zip, optionally marking the files and generated files for deletion. // Returns a slice of images that were destroyed. 
-func (s *Service) DestroyZipImages(ctx context.Context, zipFile file.File, fileDeleter *FileDeleter, deleteGenerated bool) ([]*models.Image, error) { +func (s *Service) DestroyZipImages(ctx context.Context, zipFile models.File, fileDeleter *FileDeleter, deleteGenerated bool) ([]*models.Image, error) { var imgsDestroyed []*models.Image imgs, err := s.Repository.FindByZipFileID(ctx, zipFile.Base().ID) diff --git a/pkg/image/export.go b/pkg/image/export.go index d67351e8dfb..a7c4d8575eb 100644 --- a/pkg/image/export.go +++ b/pkg/image/export.go @@ -6,7 +6,6 @@ import ( "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/json" "github.com/stashapp/stash/pkg/models/jsonschema" - "github.com/stashapp/stash/pkg/studio" ) // ToBasicJSON converts a image object into its JSON object equivalent. It @@ -53,7 +52,7 @@ func ToBasicJSON(image *models.Image) *jsonschema.Image { // GetStudioName returns the name of the provided image's studio. It returns an // empty string if there is no studio assigned to the image. 
-func GetStudioName(ctx context.Context, reader studio.Finder, image *models.Image) (string, error) { +func GetStudioName(ctx context.Context, reader models.StudioGetter, image *models.Image) (string, error) { if image.StudioID != nil { studio, err := reader.Find(ctx, *image.StudioID) if err != nil { diff --git a/pkg/image/export_test.go b/pkg/image/export_test.go index 4c46aae9578..3b64f40cbcd 100644 --- a/pkg/image/export_test.go +++ b/pkg/image/export_test.go @@ -3,7 +3,6 @@ package image import ( "errors" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/json" "github.com/stashapp/stash/pkg/models/jsonschema" @@ -45,8 +44,8 @@ var ( func createFullImage(id int) models.Image { return models.Image{ ID: id, - Files: models.NewRelatedFiles([]file.File{ - &file.BaseFile{ + Files: models.NewRelatedFiles([]models.File{ + &models.BaseFile{ Path: path, }, }), diff --git a/pkg/image/import.go b/pkg/image/import.go index 3c1e7ac8b53..4ce2287eb7b 100644 --- a/pkg/image/import.go +++ b/pkg/image/import.go @@ -5,13 +5,9 @@ import ( "fmt" "strings" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/jsonschema" - "github.com/stashapp/stash/pkg/performer" "github.com/stashapp/stash/pkg/sliceutil/stringslice" - "github.com/stashapp/stash/pkg/studio" - "github.com/stashapp/stash/pkg/tag" ) type GalleryFinder interface { @@ -19,18 +15,18 @@ type GalleryFinder interface { FindUserGalleryByTitle(ctx context.Context, title string) ([]*models.Gallery, error) } -type FullCreatorUpdater interface { - FinderCreatorUpdater - Update(ctx context.Context, updatedImage *models.Image) error +type ImporterReaderWriter interface { + models.ImageCreatorUpdater + FindByFileID(ctx context.Context, fileID models.FileID) ([]*models.Image, error) } type Importer struct { - ReaderWriter FullCreatorUpdater - FileFinder file.Getter - StudioWriter studio.NameFinderCreator + 
ReaderWriter ImporterReaderWriter + FileFinder models.FileFinder + StudioWriter models.StudioFinderCreator GalleryFinder GalleryFinder - PerformerWriter performer.NameFinderCreator - TagWriter tag.NameFinderCreator + PerformerWriter models.PerformerFinderCreator + TagWriter models.TagFinderCreator Input jsonschema.Image MissingRefBehaviour models.ImportMissingRefEnum @@ -99,7 +95,7 @@ func (i *Importer) imageJSONToImage(imageJSON jsonschema.Image) models.Image { } func (i *Importer) populateFiles(ctx context.Context) error { - files := make([]file.File, 0) + files := make([]models.File, 0) for _, ref := range i.Input.Files { path := ref @@ -330,7 +326,7 @@ func (i *Importer) FindExistingID(ctx context.Context) (*int, error) { } func (i *Importer) Create(ctx context.Context) (*int, error) { - var fileIDs []file.ID + var fileIDs []models.FileID for _, f := range i.image.Files.List() { fileIDs = append(fileIDs, f.Base().ID) } @@ -360,7 +356,7 @@ func (i *Importer) Update(ctx context.Context, id int) error { return nil } -func importTags(ctx context.Context, tagWriter tag.NameFinderCreator, names []string, missingRefBehaviour models.ImportMissingRefEnum) ([]*models.Tag, error) { +func importTags(ctx context.Context, tagWriter models.TagFinderCreator, names []string, missingRefBehaviour models.ImportMissingRefEnum) ([]*models.Tag, error) { tags, err := tagWriter.FindByNames(ctx, names, false) if err != nil { return nil, err @@ -395,7 +391,7 @@ func importTags(ctx context.Context, tagWriter tag.NameFinderCreator, names []st return tags, nil } -func createTags(ctx context.Context, tagWriter tag.NameFinderCreator, names []string) ([]*models.Tag, error) { +func createTags(ctx context.Context, tagWriter models.TagCreator, names []string) ([]*models.Tag, error) { var ret []*models.Tag for _, name := range names { newTag := models.NewTag(name) diff --git a/pkg/image/query.go b/pkg/image/query.go index 85d1df05c25..a5c9a17322f 100644 --- a/pkg/image/query.go +++ 
b/pkg/image/query.go @@ -7,14 +7,6 @@ import ( "github.com/stashapp/stash/pkg/models" ) -type Queryer interface { - Query(ctx context.Context, options models.ImageQueryOptions) (*models.ImageQueryResult, error) -} - -type CountQueryer interface { - QueryCount(ctx context.Context, imageFilter *models.ImageFilterType, findFilter *models.FindFilterType) (int, error) -} - // QueryOptions returns a ImageQueryResult populated with the provided filters. func QueryOptions(imageFilter *models.ImageFilterType, findFilter *models.FindFilterType, count bool) models.ImageQueryOptions { return models.ImageQueryOptions{ @@ -27,7 +19,7 @@ func QueryOptions(imageFilter *models.ImageFilterType, findFilter *models.FindFi } // Query queries for images using the provided filters. -func Query(ctx context.Context, qb Queryer, imageFilter *models.ImageFilterType, findFilter *models.FindFilterType) ([]*models.Image, error) { +func Query(ctx context.Context, qb models.ImageQueryer, imageFilter *models.ImageFilterType, findFilter *models.FindFilterType) ([]*models.Image, error) { result, err := qb.Query(ctx, QueryOptions(imageFilter, findFilter, false)) if err != nil { return nil, err @@ -41,7 +33,7 @@ func Query(ctx context.Context, qb Queryer, imageFilter *models.ImageFilterType, return images, nil } -func CountByPerformerID(ctx context.Context, r CountQueryer, id int) (int, error) { +func CountByPerformerID(ctx context.Context, r models.ImageQueryer, id int) (int, error) { filter := &models.ImageFilterType{ Performers: &models.MultiCriterionInput{ Value: []string{strconv.Itoa(id)}, @@ -52,7 +44,7 @@ func CountByPerformerID(ctx context.Context, r CountQueryer, id int) (int, error return r.QueryCount(ctx, filter, nil) } -func CountByStudioID(ctx context.Context, r CountQueryer, id int, depth *int) (int, error) { +func CountByStudioID(ctx context.Context, r models.ImageQueryer, id int, depth *int) (int, error) { filter := &models.ImageFilterType{ Studios: 
&models.HierarchicalMultiCriterionInput{ Value: []string{strconv.Itoa(id)}, @@ -64,7 +56,7 @@ func CountByStudioID(ctx context.Context, r CountQueryer, id int, depth *int) (i return r.QueryCount(ctx, filter, nil) } -func CountByTagID(ctx context.Context, r CountQueryer, id int, depth *int) (int, error) { +func CountByTagID(ctx context.Context, r models.ImageQueryer, id int, depth *int) (int, error) { filter := &models.ImageFilterType{ Tags: &models.HierarchicalMultiCriterionInput{ Value: []string{strconv.Itoa(id)}, @@ -76,7 +68,7 @@ func CountByTagID(ctx context.Context, r CountQueryer, id int, depth *int) (int, return r.QueryCount(ctx, filter, nil) } -func FindByGalleryID(ctx context.Context, r Queryer, galleryID int, sortBy string, sortDir models.SortDirectionEnum) ([]*models.Image, error) { +func FindByGalleryID(ctx context.Context, r models.ImageQueryer, galleryID int, sortBy string, sortDir models.SortDirectionEnum) ([]*models.Image, error) { perPage := -1 findFilter := models.FindFilterType{ @@ -99,7 +91,7 @@ func FindByGalleryID(ctx context.Context, r Queryer, galleryID int, sortBy strin }, &findFilter) } -func FindGalleryCover(ctx context.Context, r Queryer, galleryID int, galleryCoverRegex string) (*models.Image, error) { +func FindGalleryCover(ctx context.Context, r models.ImageQueryer, galleryID int, galleryCoverRegex string) (*models.Image, error) { const useCoverJpg = true img, err := findGalleryCover(ctx, r, galleryID, useCoverJpg, galleryCoverRegex) if err != nil { @@ -114,7 +106,7 @@ func FindGalleryCover(ctx context.Context, r Queryer, galleryID int, galleryCove return findGalleryCover(ctx, r, galleryID, !useCoverJpg, galleryCoverRegex) } -func findGalleryCover(ctx context.Context, r Queryer, galleryID int, useCoverJpg bool, galleryCoverRegex string) (*models.Image, error) { +func findGalleryCover(ctx context.Context, r models.ImageQueryer, galleryID int, useCoverJpg bool, galleryCoverRegex string) (*models.Image, error) { // try to find cover.jpg 
in the gallery perPage := 1 sortBy := "path" diff --git a/pkg/image/scan.go b/pkg/image/scan.go index d28d94a86c0..d584d0f55fa 100644 --- a/pkg/image/scan.go +++ b/pkg/image/scan.go @@ -8,7 +8,6 @@ import ( "path/filepath" "time" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/paths" @@ -21,21 +20,22 @@ var ( ErrNotImageFile = errors.New("not an image file") ) -type FinderCreatorUpdater interface { - FindByFileID(ctx context.Context, fileID file.ID) ([]*models.Image, error) - FindByFolderID(ctx context.Context, folderID file.FolderID) ([]*models.Image, error) - FindByFingerprints(ctx context.Context, fp []file.Fingerprint) ([]*models.Image, error) +type ScanCreatorUpdater interface { + FindByFileID(ctx context.Context, fileID models.FileID) ([]*models.Image, error) + FindByFolderID(ctx context.Context, folderID models.FolderID) ([]*models.Image, error) + FindByFingerprints(ctx context.Context, fp []models.Fingerprint) ([]*models.Image, error) + GetFiles(ctx context.Context, relatedID int) ([]models.File, error) + GetGalleryIDs(ctx context.Context, relatedID int) ([]int, error) + Create(ctx context.Context, newImage *models.ImageCreateInput) error UpdatePartial(ctx context.Context, id int, updatedImage models.ImagePartial) (*models.Image, error) - AddFileID(ctx context.Context, id int, fileID file.ID) error - models.GalleryIDLoader - models.FileLoader + AddFileID(ctx context.Context, id int, fileID models.FileID) error } type GalleryFinderCreator interface { - FindByFileID(ctx context.Context, fileID file.ID) ([]*models.Gallery, error) - FindByFolderID(ctx context.Context, folderID file.FolderID) ([]*models.Gallery, error) - Create(ctx context.Context, newObject *models.Gallery, fileIDs []file.ID) error + FindByFileID(ctx context.Context, fileID models.FileID) ([]*models.Gallery, error) + FindByFolderID(ctx context.Context, folderID models.FolderID) 
([]*models.Gallery, error) + Create(ctx context.Context, newObject *models.Gallery, fileIDs []models.FileID) error UpdatePartial(ctx context.Context, id int, updatedGallery models.GalleryPartial) (*models.Gallery, error) } @@ -44,11 +44,11 @@ type ScanConfig interface { } type ScanGenerator interface { - Generate(ctx context.Context, i *models.Image, f file.File) error + Generate(ctx context.Context, i *models.Image, f models.File) error } type ScanHandler struct { - CreatorUpdater FinderCreatorUpdater + CreatorUpdater ScanCreatorUpdater GalleryFinder GalleryFinderCreator ScanGenerator ScanGenerator @@ -80,7 +80,7 @@ func (h *ScanHandler) validate() error { return nil } -func (h *ScanHandler) Handle(ctx context.Context, f file.File, oldFile file.File) error { +func (h *ScanHandler) Handle(ctx context.Context, f models.File, oldFile models.File) error { if err := h.validate(); err != nil { return err } @@ -130,7 +130,7 @@ func (h *ScanHandler) Handle(ctx context.Context, f file.File, oldFile file.File if err := h.CreatorUpdater.Create(ctx, &models.ImageCreateInput{ Image: newImage, - FileIDs: []file.ID{imageFile.ID}, + FileIDs: []models.FileID{imageFile.ID}, }); err != nil { return fmt.Errorf("creating new image: %w", err) } @@ -151,8 +151,8 @@ func (h *ScanHandler) Handle(ctx context.Context, f file.File, oldFile file.File // remove the old thumbnail if the checksum changed - we'll regenerate it if oldFile != nil { - oldHash := oldFile.Base().Fingerprints.GetString(file.FingerprintTypeMD5) - newHash := f.Base().Fingerprints.GetString(file.FingerprintTypeMD5) + oldHash := oldFile.Base().Fingerprints.GetString(models.FingerprintTypeMD5) + newHash := f.Base().Fingerprints.GetString(models.FingerprintTypeMD5) if oldHash != "" && newHash != "" && oldHash != newHash { // remove cache dir of gallery @@ -173,7 +173,7 @@ func (h *ScanHandler) Handle(ctx context.Context, f file.File, oldFile file.File return nil } -func (h *ScanHandler) associateExisting(ctx context.Context, 
existing []*models.Image, f *file.BaseFile, updateExisting bool) error { +func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models.Image, f *models.BaseFile, updateExisting bool) error { for _, i := range existing { if err := i.LoadFiles(ctx, h.CreatorUpdater); err != nil { return err @@ -239,7 +239,7 @@ func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models. return nil } -func (h *ScanHandler) getOrCreateFolderBasedGallery(ctx context.Context, f file.File) (*models.Gallery, error) { +func (h *ScanHandler) getOrCreateFolderBasedGallery(ctx context.Context, f models.File) (*models.Gallery, error) { folderID := f.Base().ParentFolderID g, err := h.GalleryFinder.FindByFolderID(ctx, folderID) if err != nil { @@ -299,7 +299,7 @@ func (h *ScanHandler) associateFolderImages(ctx context.Context, g *models.Galle return nil } -func (h *ScanHandler) getOrCreateZipBasedGallery(ctx context.Context, zipFile file.File) (*models.Gallery, error) { +func (h *ScanHandler) getOrCreateZipBasedGallery(ctx context.Context, zipFile models.File) (*models.Gallery, error) { g, err := h.GalleryFinder.FindByFileID(ctx, zipFile.Base().ID) if err != nil { return nil, fmt.Errorf("finding zip based gallery: %w", err) @@ -319,7 +319,7 @@ func (h *ScanHandler) getOrCreateZipBasedGallery(ctx context.Context, zipFile fi logger.Infof("%s doesn't exist. 
Creating new gallery...", zipFile.Base().Path) - if err := h.GalleryFinder.Create(ctx, newGallery, []file.ID{zipFile.Base().ID}); err != nil { + if err := h.GalleryFinder.Create(ctx, newGallery, []models.FileID{zipFile.Base().ID}); err != nil { return nil, fmt.Errorf("creating zip-based gallery: %w", err) } @@ -328,7 +328,7 @@ func (h *ScanHandler) getOrCreateZipBasedGallery(ctx context.Context, zipFile fi return newGallery, nil } -func (h *ScanHandler) getOrCreateGallery(ctx context.Context, f file.File) (*models.Gallery, error) { +func (h *ScanHandler) getOrCreateGallery(ctx context.Context, f models.File) (*models.Gallery, error) { // don't create folder-based galleries for files in zip file if f.Base().ZipFile != nil { return h.getOrCreateZipBasedGallery(ctx, f.Base().ZipFile) @@ -357,7 +357,7 @@ func (h *ScanHandler) getOrCreateGallery(ctx context.Context, f file.File) (*mod return nil, nil } -func (h *ScanHandler) getGalleryToAssociate(ctx context.Context, newImage *models.Image, f file.File) (*models.Gallery, error) { +func (h *ScanHandler) getGalleryToAssociate(ctx context.Context, newImage *models.Image, f models.File) (*models.Gallery, error) { g, err := h.getOrCreateGallery(ctx, f) if err != nil { return nil, err diff --git a/pkg/image/service.go b/pkg/image/service.go index 5aacc4e59c2..55dc7686d1c 100644 --- a/pkg/image/service.go +++ b/pkg/image/service.go @@ -1,24 +1,10 @@ package image import ( - "context" - - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" ) -type FinderByFile interface { - FindByFileID(ctx context.Context, fileID file.ID) ([]*models.Image, error) - FindByZipFileID(ctx context.Context, zipFileID file.ID) ([]*models.Image, error) -} - -type Repository interface { - FinderByFile - Destroyer - models.FileLoader -} - type Service struct { - File file.Store - Repository Repository + File models.FileReaderWriter + Repository models.ImageReaderWriter } diff --git a/pkg/image/thumbnail.go 
b/pkg/image/thumbnail.go index dc07b0f5537..16191fa55bb 100644 --- a/pkg/image/thumbnail.go +++ b/pkg/image/thumbnail.go @@ -14,6 +14,7 @@ import ( "github.com/stashapp/stash/pkg/ffmpeg/transcoder" "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/fsutil" + "github.com/stashapp/stash/pkg/models" ) const ffmpegImageQuality = 5 @@ -68,7 +69,7 @@ func NewThumbnailEncoder(ffmpegEncoder *ffmpeg.FFMpeg, ffProbe ffmpeg.FFProbe, c // the provided max size. It resizes based on the largest X/Y direction. // It returns nil and an error if an error occurs reading, decoding or encoding // the image, or if the image is not suitable for thumbnails. -func (e *ThumbnailEncoder) GetThumbnail(f file.File, maxSize int) ([]byte, error) { +func (e *ThumbnailEncoder) GetThumbnail(f models.File, maxSize int) ([]byte, error) { reader, err := f.Open(&file.OsFS{}) if err != nil { return nil, err @@ -82,7 +83,7 @@ func (e *ThumbnailEncoder) GetThumbnail(f file.File, maxSize int) ([]byte, error data := buf.Bytes() - if imageFile, ok := f.(*file.ImageFile); ok { + if imageFile, ok := f.(*models.ImageFile); ok { format := imageFile.Format animated := imageFile.Format == formatGif @@ -98,7 +99,7 @@ func (e *ThumbnailEncoder) GetThumbnail(f file.File, maxSize int) ([]byte, error } // Videofiles can only be thumbnailed with ffmpeg - if _, ok := f.(*file.VideoFile); ok { + if _, ok := f.(*models.VideoFile); ok { return e.ffmpegImageThumbnail(buf, maxSize) } diff --git a/pkg/image/update.go b/pkg/image/update.go index 3a173b7ad3d..e3a63b53d03 100644 --- a/pkg/image/update.go +++ b/pkg/image/update.go @@ -6,11 +6,7 @@ import ( "github.com/stashapp/stash/pkg/models" ) -type PartialUpdater interface { - UpdatePartial(ctx context.Context, id int, partial models.ImagePartial) (*models.Image, error) -} - -func AddPerformer(ctx context.Context, qb PartialUpdater, i *models.Image, performerID int) error { +func AddPerformer(ctx context.Context, qb models.ImageUpdater, i *models.Image, 
performerID int) error { _, err := qb.UpdatePartial(ctx, i.ID, models.ImagePartial{ PerformerIDs: &models.UpdateIDs{ IDs: []int{performerID}, @@ -21,7 +17,7 @@ func AddPerformer(ctx context.Context, qb PartialUpdater, i *models.Image, perfo return err } -func AddTag(ctx context.Context, qb PartialUpdater, i *models.Image, tagID int) error { +func AddTag(ctx context.Context, qb models.ImageUpdater, i *models.Image, tagID int) error { _, err := qb.UpdatePartial(ctx, i.ID, models.ImagePartial{ TagIDs: &models.UpdateIDs{ IDs: []int{tagID}, diff --git a/pkg/match/cache.go b/pkg/match/cache.go index 06237c7f6ee..6946f65dbbc 100644 --- a/pkg/match/cache.go +++ b/pkg/match/cache.go @@ -20,7 +20,7 @@ type Cache struct { // against. This means that performers with single-letter words in their names could potentially // be missed. // This query is expensive, so it's queried once and cached, if the cache if provided. -func getSingleLetterPerformers(ctx context.Context, c *Cache, reader PerformerAutoTagQueryer) ([]*models.Performer, error) { +func getSingleLetterPerformers(ctx context.Context, c *Cache, reader models.PerformerAutoTagQueryer) ([]*models.Performer, error) { if c == nil { c = &Cache{} } @@ -53,7 +53,7 @@ func getSingleLetterPerformers(ctx context.Context, c *Cache, reader PerformerAu // getSingleLetterStudios returns all studios with names that start with single character words. // See getSingleLetterPerformers for details. -func getSingleLetterStudios(ctx context.Context, c *Cache, reader StudioAutoTagQueryer) ([]*models.Studio, error) { +func getSingleLetterStudios(ctx context.Context, c *Cache, reader models.StudioAutoTagQueryer) ([]*models.Studio, error) { if c == nil { c = &Cache{} } @@ -86,7 +86,7 @@ func getSingleLetterStudios(ctx context.Context, c *Cache, reader StudioAutoTagQ // getSingleLetterTags returns all tags with names that start with single character words. // See getSingleLetterPerformers for details. 
-func getSingleLetterTags(ctx context.Context, c *Cache, reader TagAutoTagQueryer) ([]*models.Tag, error) { +func getSingleLetterTags(ctx context.Context, c *Cache, reader models.TagAutoTagQueryer) ([]*models.Tag, error) { if c == nil { c = &Cache{} } diff --git a/pkg/match/path.go b/pkg/match/path.go index 666d643747a..0b5aaa6ba48 100644 --- a/pkg/match/path.go +++ b/pkg/match/path.go @@ -14,8 +14,6 @@ import ( "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/scene" "github.com/stashapp/stash/pkg/sliceutil/stringslice" - "github.com/stashapp/stash/pkg/studio" - "github.com/stashapp/stash/pkg/tag" ) const ( @@ -28,24 +26,6 @@ const ( var separatorRE = regexp.MustCompile(separatorPattern) -type PerformerAutoTagQueryer interface { - Query(ctx context.Context, performerFilter *models.PerformerFilterType, findFilter *models.FindFilterType) ([]*models.Performer, int, error) - QueryForAutoTag(ctx context.Context, words []string) ([]*models.Performer, error) - models.AliasLoader -} - -type StudioAutoTagQueryer interface { - QueryForAutoTag(ctx context.Context, words []string) ([]*models.Studio, error) - studio.Queryer - GetAliases(ctx context.Context, studioID int) ([]string, error) -} - -type TagAutoTagQueryer interface { - QueryForAutoTag(ctx context.Context, words []string) ([]*models.Tag, error) - tag.Queryer - GetAliases(ctx context.Context, tagID int) ([]string, error) -} - func getPathQueryRegex(name string) string { // escape specific regex characters name = regexp.QuoteMeta(name) @@ -146,7 +126,7 @@ func regexpMatchesPath(r *regexp.Regexp, path string) int { return found[len(found)-1][0] } -func getPerformers(ctx context.Context, words []string, performerReader PerformerAutoTagQueryer, cache *Cache) ([]*models.Performer, error) { +func getPerformers(ctx context.Context, words []string, performerReader models.PerformerAutoTagQueryer, cache *Cache) ([]*models.Performer, error) { performers, err := performerReader.QueryForAutoTag(ctx, words) if 
err != nil { return nil, err @@ -160,7 +140,7 @@ func getPerformers(ctx context.Context, words []string, performerReader Performe return append(performers, swPerformers...), nil } -func PathToPerformers(ctx context.Context, path string, reader PerformerAutoTagQueryer, cache *Cache, trimExt bool) ([]*models.Performer, error) { +func PathToPerformers(ctx context.Context, path string, reader models.PerformerAutoTagQueryer, cache *Cache, trimExt bool) ([]*models.Performer, error) { words := getPathWords(path, trimExt) performers, err := getPerformers(ctx, words, reader, cache) @@ -198,7 +178,7 @@ func PathToPerformers(ctx context.Context, path string, reader PerformerAutoTagQ return ret, nil } -func getStudios(ctx context.Context, words []string, reader StudioAutoTagQueryer, cache *Cache) ([]*models.Studio, error) { +func getStudios(ctx context.Context, words []string, reader models.StudioAutoTagQueryer, cache *Cache) ([]*models.Studio, error) { studios, err := reader.QueryForAutoTag(ctx, words) if err != nil { return nil, err @@ -215,7 +195,7 @@ func getStudios(ctx context.Context, words []string, reader StudioAutoTagQueryer // PathToStudio returns the Studio that matches the given path. // Where multiple matching studios are found, the one that matches the latest // position in the path is returned. 
-func PathToStudio(ctx context.Context, path string, reader StudioAutoTagQueryer, cache *Cache, trimExt bool) (*models.Studio, error) { +func PathToStudio(ctx context.Context, path string, reader models.StudioAutoTagQueryer, cache *Cache, trimExt bool) (*models.Studio, error) { words := getPathWords(path, trimExt) candidates, err := getStudios(ctx, words, reader, cache) @@ -249,7 +229,7 @@ func PathToStudio(ctx context.Context, path string, reader StudioAutoTagQueryer, return ret, nil } -func getTags(ctx context.Context, words []string, reader TagAutoTagQueryer, cache *Cache) ([]*models.Tag, error) { +func getTags(ctx context.Context, words []string, reader models.TagAutoTagQueryer, cache *Cache) ([]*models.Tag, error) { tags, err := reader.QueryForAutoTag(ctx, words) if err != nil { return nil, err @@ -263,7 +243,7 @@ func getTags(ctx context.Context, words []string, reader TagAutoTagQueryer, cach return append(tags, swTags...), nil } -func PathToTags(ctx context.Context, path string, reader TagAutoTagQueryer, cache *Cache, trimExt bool) ([]*models.Tag, error) { +func PathToTags(ctx context.Context, path string, reader models.TagAutoTagQueryer, cache *Cache, trimExt bool) ([]*models.Tag, error) { words := getPathWords(path, trimExt) tags, err := getTags(ctx, words, reader, cache) @@ -299,7 +279,7 @@ func PathToTags(ctx context.Context, path string, reader TagAutoTagQueryer, cach return ret, nil } -func PathToScenesFn(ctx context.Context, name string, paths []string, sceneReader scene.Queryer, fn func(ctx context.Context, scene *models.Scene) error) error { +func PathToScenesFn(ctx context.Context, name string, paths []string, sceneReader models.SceneQueryer, fn func(ctx context.Context, scene *models.Scene) error) error { regex := getPathQueryRegex(name) organized := false filter := models.SceneFilterType{ @@ -358,7 +338,7 @@ func PathToScenesFn(ctx context.Context, name string, paths []string, sceneReade return nil } -func PathToImagesFn(ctx context.Context, name 
string, paths []string, imageReader image.Queryer, fn func(ctx context.Context, scene *models.Image) error) error { +func PathToImagesFn(ctx context.Context, name string, paths []string, imageReader models.ImageQueryer, fn func(ctx context.Context, scene *models.Image) error) error { regex := getPathQueryRegex(name) organized := false filter := models.ImageFilterType{ @@ -417,7 +397,7 @@ func PathToImagesFn(ctx context.Context, name string, paths []string, imageReade return nil } -func PathToGalleriesFn(ctx context.Context, name string, paths []string, galleryReader gallery.Queryer, fn func(ctx context.Context, scene *models.Gallery) error) error { +func PathToGalleriesFn(ctx context.Context, name string, paths []string, galleryReader models.GalleryQueryer, fn func(ctx context.Context, scene *models.Gallery) error) error { regex := getPathQueryRegex(name) organized := false filter := models.GalleryFilterType{ diff --git a/pkg/match/scraped.go b/pkg/match/scraped.go index d1182a32963..675a8d7fcdd 100644 --- a/pkg/match/scraped.go +++ b/pkg/match/scraped.go @@ -58,7 +58,7 @@ func ScrapedPerformer(ctx context.Context, qb PerformerFinder, p *models.Scraped } type StudioFinder interface { - studio.Queryer + models.StudioQueryer FindByStashID(ctx context.Context, stashID models.StashID) ([]*models.Studio, error) } @@ -134,7 +134,7 @@ func ScrapedMovie(ctx context.Context, qb MovieNamesFinder, m *models.ScrapedMov // ScrapedTag matches the provided tag with the tags // in the database and sets the ID field if one is found. 
-func ScrapedTag(ctx context.Context, qb tag.Queryer, s *models.ScrapedTag) error { +func ScrapedTag(ctx context.Context, qb models.TagQueryer, s *models.ScrapedTag) error { if s.StoredID != nil { return nil } diff --git a/pkg/models/file.go b/pkg/models/file.go index 827a55d5ca4..e6ce41d1e97 100644 --- a/pkg/models/file.go +++ b/pkg/models/file.go @@ -4,8 +4,6 @@ import ( "context" "path/filepath" "strings" - - "github.com/stashapp/stash/pkg/file" ) type FileQueryOptions struct { @@ -57,24 +55,24 @@ func PathsFileFilter(paths []string) *FileFilterType { type FileQueryResult struct { // can't use QueryResult because id type is wrong - IDs []file.ID + IDs []FileID Count int - finder file.Finder - files []file.File + getter FileGetter + files []File resolveErr error } -func NewFileQueryResult(finder file.Finder) *FileQueryResult { +func NewFileQueryResult(fileGetter FileGetter) *FileQueryResult { return &FileQueryResult{ - finder: finder, + getter: fileGetter, } } -func (r *FileQueryResult) Resolve(ctx context.Context) ([]file.File, error) { +func (r *FileQueryResult) Resolve(ctx context.Context) ([]File, error) { // cache results if r.files == nil && r.resolveErr == nil { - r.files, r.resolveErr = r.finder.Find(ctx, r.IDs...) + r.files, r.resolveErr = r.getter.Find(ctx, r.IDs...) 
} return r.files, r.resolveErr } diff --git a/pkg/file/fingerprint.go b/pkg/models/fingerprint.go similarity index 90% rename from pkg/file/fingerprint.go rename to pkg/models/fingerprint.go index 3155276c594..0123f289d9f 100644 --- a/pkg/file/fingerprint.go +++ b/pkg/models/fingerprint.go @@ -1,4 +1,9 @@ -package file +package models + +import ( + "fmt" + "strconv" +) var ( FingerprintTypeOshash = "oshash" @@ -12,6 +17,15 @@ type Fingerprint struct { Fingerprint interface{} } +func (f *Fingerprint) Value() string { + switch v := f.Fingerprint.(type) { + case int64: + return strconv.FormatUint(uint64(v), 16) + default: + return fmt.Sprintf("%v", f.Fingerprint) + } +} + type Fingerprints []Fingerprint func (f *Fingerprints) Remove(type_ string) { @@ -114,8 +128,3 @@ func (f Fingerprints) AppendUnique(o Fingerprint) Fingerprints { return append(f, o) } - -// FingerprintCalculator calculates a fingerprint for the provided file. -type FingerprintCalculator interface { - CalculateFingerprints(f *BaseFile, o Opener, useExisting bool) ([]Fingerprint, error) -} diff --git a/pkg/file/fingerprint_test.go b/pkg/models/fingerprint_test.go similarity index 99% rename from pkg/file/fingerprint_test.go rename to pkg/models/fingerprint_test.go index f13ce22545a..d35f4b08231 100644 --- a/pkg/file/fingerprint_test.go +++ b/pkg/models/fingerprint_test.go @@ -1,4 +1,4 @@ -package file +package models import "testing" diff --git a/pkg/models/fs.go b/pkg/models/fs.go new file mode 100644 index 00000000000..bdbf603498d --- /dev/null +++ b/pkg/models/fs.go @@ -0,0 +1,27 @@ +package models + +import ( + "io" + "io/fs" +) + +// FileOpener provides an interface to open a file. +type FileOpener interface { + Open() (io.ReadCloser, error) +} + +// FS represents a file system. 
+type FS interface { + Stat(name string) (fs.FileInfo, error) + Lstat(name string) (fs.FileInfo, error) + Open(name string) (fs.ReadDirFile, error) + OpenZip(name string) (ZipFS, error) + IsPathCaseSensitive(path string) (bool, error) +} + +// ZipFS represents a zip file system. +type ZipFS interface { + FS + io.Closer + OpenOnly(name string) (io.ReadCloser, error) +} diff --git a/pkg/models/gallery.go b/pkg/models/gallery.go index 61ee2a72de3..d3644d3fd6c 100644 --- a/pkg/models/gallery.go +++ b/pkg/models/gallery.go @@ -1,11 +1,5 @@ package models -import ( - "context" - - "github.com/stashapp/stash/pkg/file" -) - type GalleryFilterType struct { And *GalleryFilterType `json:"AND"` Or *GalleryFilterType `json:"OR"` @@ -86,40 +80,3 @@ type GalleryDestroyInput struct { DeleteFile *bool `json:"delete_file"` DeleteGenerated *bool `json:"delete_generated"` } - -type GalleryFinder interface { - FindMany(ctx context.Context, ids []int) ([]*Gallery, error) -} - -type GalleryReader interface { - Find(ctx context.Context, id int) (*Gallery, error) - GalleryFinder - FindByChecksum(ctx context.Context, checksum string) ([]*Gallery, error) - FindByChecksums(ctx context.Context, checksums []string) ([]*Gallery, error) - FindByPath(ctx context.Context, path string) ([]*Gallery, error) - FindBySceneID(ctx context.Context, sceneID int) ([]*Gallery, error) - FindByImageID(ctx context.Context, imageID int) ([]*Gallery, error) - - SceneIDLoader - PerformerIDLoader - TagIDLoader - - Count(ctx context.Context) (int, error) - All(ctx context.Context) ([]*Gallery, error) - Query(ctx context.Context, galleryFilter *GalleryFilterType, findFilter *FindFilterType) ([]*Gallery, int, error) - QueryCount(ctx context.Context, galleryFilter *GalleryFilterType, findFilter *FindFilterType) (int, error) - GetImageIDs(ctx context.Context, galleryID int) ([]int, error) -} - -type GalleryWriter interface { - Create(ctx context.Context, newGallery *Gallery, fileIDs []file.ID) error - Update(ctx 
context.Context, updatedGallery *Gallery) error - UpdatePartial(ctx context.Context, id int, updatedGallery GalleryPartial) (*Gallery, error) - Destroy(ctx context.Context, id int) error - UpdateImages(ctx context.Context, galleryID int, imageIDs []int) error -} - -type GalleryReaderWriter interface { - GalleryReader - GalleryWriter -} diff --git a/pkg/models/gallery_chapter.go b/pkg/models/gallery_chapter.go deleted file mode 100644 index 0057809821b..00000000000 --- a/pkg/models/gallery_chapter.go +++ /dev/null @@ -1,21 +0,0 @@ -package models - -import "context" - -type GalleryChapterReader interface { - Find(ctx context.Context, id int) (*GalleryChapter, error) - FindMany(ctx context.Context, ids []int) ([]*GalleryChapter, error) - FindByGalleryID(ctx context.Context, galleryID int) ([]*GalleryChapter, error) -} - -type GalleryChapterWriter interface { - Create(ctx context.Context, newGalleryChapter *GalleryChapter) error - Update(ctx context.Context, updatedGalleryChapter *GalleryChapter) error - UpdatePartial(ctx context.Context, id int, updatedGalleryChapter GalleryChapterPartial) (*GalleryChapter, error) - Destroy(ctx context.Context, id int) error -} - -type GalleryChapterReaderWriter interface { - GalleryChapterReader - GalleryChapterWriter -} diff --git a/pkg/models/image.go b/pkg/models/image.go index 288f6997621..ef90dfd7d69 100644 --- a/pkg/models/image.go +++ b/pkg/models/image.go @@ -77,60 +77,21 @@ type ImageQueryResult struct { Megapixels float64 TotalSize float64 - finder ImageFinder + getter ImageGetter images []*Image resolveErr error } -func NewImageQueryResult(finder ImageFinder) *ImageQueryResult { +func NewImageQueryResult(getter ImageGetter) *ImageQueryResult { return &ImageQueryResult{ - finder: finder, + getter: getter, } } func (r *ImageQueryResult) Resolve(ctx context.Context) ([]*Image, error) { // cache results if r.images == nil && r.resolveErr == nil { - r.images, r.resolveErr = r.finder.FindMany(ctx, r.IDs) + r.images, 
r.resolveErr = r.getter.FindMany(ctx, r.IDs) } return r.images, r.resolveErr } - -type ImageFinder interface { - // TODO - rename to Find and remove existing method - FindMany(ctx context.Context, ids []int) ([]*Image, error) -} - -type ImageReader interface { - ImageFinder - // TODO - remove this in another PR - Find(ctx context.Context, id int) (*Image, error) - FindByChecksum(ctx context.Context, checksum string) ([]*Image, error) - FindByGalleryID(ctx context.Context, galleryID int) ([]*Image, error) - CountByGalleryID(ctx context.Context, galleryID int) (int, error) - OCountByPerformerID(ctx context.Context, performerID int) (int, error) - Count(ctx context.Context) (int, error) - Size(ctx context.Context) (float64, error) - All(ctx context.Context) ([]*Image, error) - Query(ctx context.Context, options ImageQueryOptions) (*ImageQueryResult, error) - QueryCount(ctx context.Context, imageFilter *ImageFilterType, findFilter *FindFilterType) (int, error) - - GalleryIDLoader - PerformerIDLoader - TagIDLoader -} - -type ImageWriter interface { - Create(ctx context.Context, newImage *ImageCreateInput) error - Update(ctx context.Context, updatedImage *Image) error - UpdatePartial(ctx context.Context, id int, partial ImagePartial) (*Image, error) - IncrementOCounter(ctx context.Context, id int) (int, error) - DecrementOCounter(ctx context.Context, id int) (int, error) - ResetOCounter(ctx context.Context, id int) (int, error) - Destroy(ctx context.Context, id int) error -} - -type ImageReaderWriter interface { - ImageReader - ImageWriter -} diff --git a/pkg/models/mocks/FileReaderWriter.go b/pkg/models/mocks/FileReaderWriter.go new file mode 100644 index 00000000000..8e7982b4745 --- /dev/null +++ b/pkg/models/mocks/FileReaderWriter.go @@ -0,0 +1,350 @@ +// Code generated by mockery v2.10.0. DO NOT EDIT. 
+ +package mocks + +import ( + context "context" + fs "io/fs" + + mock "github.com/stretchr/testify/mock" + + models "github.com/stashapp/stash/pkg/models" +) + +// FileReaderWriter is an autogenerated mock type for the FileReaderWriter type +type FileReaderWriter struct { + mock.Mock +} + +// CountAllInPaths provides a mock function with given fields: ctx, p +func (_m *FileReaderWriter) CountAllInPaths(ctx context.Context, p []string) (int, error) { + ret := _m.Called(ctx, p) + + var r0 int + if rf, ok := ret.Get(0).(func(context.Context, []string) int); ok { + r0 = rf(ctx, p) + } else { + r0 = ret.Get(0).(int) + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []string) error); ok { + r1 = rf(ctx, p) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// CountByFolderID provides a mock function with given fields: ctx, folderID +func (_m *FileReaderWriter) CountByFolderID(ctx context.Context, folderID models.FolderID) (int, error) { + ret := _m.Called(ctx, folderID) + + var r0 int + if rf, ok := ret.Get(0).(func(context.Context, models.FolderID) int); ok { + r0 = rf(ctx, folderID) + } else { + r0 = ret.Get(0).(int) + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FolderID) error); ok { + r1 = rf(ctx, folderID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Create provides a mock function with given fields: ctx, f +func (_m *FileReaderWriter) Create(ctx context.Context, f models.File) error { + ret := _m.Called(ctx, f) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, models.File) error); ok { + r0 = rf(ctx, f) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Destroy provides a mock function with given fields: ctx, id +func (_m *FileReaderWriter) Destroy(ctx context.Context, id models.FileID) error { + ret := _m.Called(ctx, id) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, models.FileID) error); ok { + r0 = rf(ctx, id) + } else { + r0 = ret.Error(0) + } 
+ + return r0 +} + +// Find provides a mock function with given fields: ctx, id +func (_m *FileReaderWriter) Find(ctx context.Context, id ...models.FileID) ([]models.File, error) { + _va := make([]interface{}, len(id)) + for _i := range id { + _va[_i] = id[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 []models.File + if rf, ok := ret.Get(0).(func(context.Context, ...models.FileID) []models.File); ok { + r0 = rf(ctx, id...) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.File) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, ...models.FileID) error); ok { + r1 = rf(ctx, id...) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindAllByPath provides a mock function with given fields: ctx, path +func (_m *FileReaderWriter) FindAllByPath(ctx context.Context, path string) ([]models.File, error) { + ret := _m.Called(ctx, path) + + var r0 []models.File + if rf, ok := ret.Get(0).(func(context.Context, string) []models.File); ok { + r0 = rf(ctx, path) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.File) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { + r1 = rf(ctx, path) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindAllInPaths provides a mock function with given fields: ctx, p, limit, offset +func (_m *FileReaderWriter) FindAllInPaths(ctx context.Context, p []string, limit int, offset int) ([]models.File, error) { + ret := _m.Called(ctx, p, limit, offset) + + var r0 []models.File + if rf, ok := ret.Get(0).(func(context.Context, []string, int, int) []models.File); ok { + r0 = rf(ctx, p, limit, offset) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.File) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []string, int, int) error); ok { + r1 = rf(ctx, p, limit, offset) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + 
+// FindByFileInfo provides a mock function with given fields: ctx, info, size +func (_m *FileReaderWriter) FindByFileInfo(ctx context.Context, info fs.FileInfo, size int64) ([]models.File, error) { + ret := _m.Called(ctx, info, size) + + var r0 []models.File + if rf, ok := ret.Get(0).(func(context.Context, fs.FileInfo, int64) []models.File); ok { + r0 = rf(ctx, info, size) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.File) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, fs.FileInfo, int64) error); ok { + r1 = rf(ctx, info, size) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByFingerprint provides a mock function with given fields: ctx, fp +func (_m *FileReaderWriter) FindByFingerprint(ctx context.Context, fp models.Fingerprint) ([]models.File, error) { + ret := _m.Called(ctx, fp) + + var r0 []models.File + if rf, ok := ret.Get(0).(func(context.Context, models.Fingerprint) []models.File); ok { + r0 = rf(ctx, fp) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.File) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.Fingerprint) error); ok { + r1 = rf(ctx, fp) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByPath provides a mock function with given fields: ctx, path +func (_m *FileReaderWriter) FindByPath(ctx context.Context, path string) (models.File, error) { + ret := _m.Called(ctx, path) + + var r0 models.File + if rf, ok := ret.Get(0).(func(context.Context, string) models.File); ok { + r0 = rf(ctx, path) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(models.File) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { + r1 = rf(ctx, path) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByZipFileID provides a mock function with given fields: ctx, zipFileID +func (_m *FileReaderWriter) FindByZipFileID(ctx context.Context, zipFileID models.FileID) ([]models.File, error) { + 
ret := _m.Called(ctx, zipFileID) + + var r0 []models.File + if rf, ok := ret.Get(0).(func(context.Context, models.FileID) []models.File); ok { + r0 = rf(ctx, zipFileID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.File) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FileID) error); ok { + r1 = rf(ctx, zipFileID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetCaptions provides a mock function with given fields: ctx, fileID +func (_m *FileReaderWriter) GetCaptions(ctx context.Context, fileID models.FileID) ([]*models.VideoCaption, error) { + ret := _m.Called(ctx, fileID) + + var r0 []*models.VideoCaption + if rf, ok := ret.Get(0).(func(context.Context, models.FileID) []*models.VideoCaption); ok { + r0 = rf(ctx, fileID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.VideoCaption) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FileID) error); ok { + r1 = rf(ctx, fileID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// IsPrimary provides a mock function with given fields: ctx, fileID +func (_m *FileReaderWriter) IsPrimary(ctx context.Context, fileID models.FileID) (bool, error) { + ret := _m.Called(ctx, fileID) + + var r0 bool + if rf, ok := ret.Get(0).(func(context.Context, models.FileID) bool); ok { + r0 = rf(ctx, fileID) + } else { + r0 = ret.Get(0).(bool) + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FileID) error); ok { + r1 = rf(ctx, fileID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Query provides a mock function with given fields: ctx, options +func (_m *FileReaderWriter) Query(ctx context.Context, options models.FileQueryOptions) (*models.FileQueryResult, error) { + ret := _m.Called(ctx, options) + + var r0 *models.FileQueryResult + if rf, ok := ret.Get(0).(func(context.Context, models.FileQueryOptions) *models.FileQueryResult); ok { + r0 = rf(ctx, options) + } else { + if 
ret.Get(0) != nil { + r0 = ret.Get(0).(*models.FileQueryResult) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FileQueryOptions) error); ok { + r1 = rf(ctx, options) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Update provides a mock function with given fields: ctx, f +func (_m *FileReaderWriter) Update(ctx context.Context, f models.File) error { + ret := _m.Called(ctx, f) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, models.File) error); ok { + r0 = rf(ctx, f) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// UpdateCaptions provides a mock function with given fields: ctx, fileID, captions +func (_m *FileReaderWriter) UpdateCaptions(ctx context.Context, fileID models.FileID, captions []*models.VideoCaption) error { + ret := _m.Called(ctx, fileID, captions) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, models.FileID, []*models.VideoCaption) error); ok { + r0 = rf(ctx, fileID, captions) + } else { + r0 = ret.Error(0) + } + + return r0 +} diff --git a/pkg/models/mocks/FolderReaderWriter.go b/pkg/models/mocks/FolderReaderWriter.go new file mode 100644 index 00000000000..968bed4adc7 --- /dev/null +++ b/pkg/models/mocks/FolderReaderWriter.go @@ -0,0 +1,193 @@ +// Code generated by mockery v2.10.0. DO NOT EDIT. 
+ +package mocks + +import ( + context "context" + + models "github.com/stashapp/stash/pkg/models" + mock "github.com/stretchr/testify/mock" +) + +// FolderReaderWriter is an autogenerated mock type for the FolderReaderWriter type +type FolderReaderWriter struct { + mock.Mock +} + +// CountAllInPaths provides a mock function with given fields: ctx, p +func (_m *FolderReaderWriter) CountAllInPaths(ctx context.Context, p []string) (int, error) { + ret := _m.Called(ctx, p) + + var r0 int + if rf, ok := ret.Get(0).(func(context.Context, []string) int); ok { + r0 = rf(ctx, p) + } else { + r0 = ret.Get(0).(int) + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []string) error); ok { + r1 = rf(ctx, p) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Create provides a mock function with given fields: ctx, f +func (_m *FolderReaderWriter) Create(ctx context.Context, f *models.Folder) error { + ret := _m.Called(ctx, f) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *models.Folder) error); ok { + r0 = rf(ctx, f) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Destroy provides a mock function with given fields: ctx, id +func (_m *FolderReaderWriter) Destroy(ctx context.Context, id models.FolderID) error { + ret := _m.Called(ctx, id) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, models.FolderID) error); ok { + r0 = rf(ctx, id) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// Find provides a mock function with given fields: ctx, id +func (_m *FolderReaderWriter) Find(ctx context.Context, id models.FolderID) (*models.Folder, error) { + ret := _m.Called(ctx, id) + + var r0 *models.Folder + if rf, ok := ret.Get(0).(func(context.Context, models.FolderID) *models.Folder); ok { + r0 = rf(ctx, id) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.Folder) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FolderID) error); ok { + r1 = rf(ctx, id) + } else 
{ + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindAllInPaths provides a mock function with given fields: ctx, p, limit, offset +func (_m *FolderReaderWriter) FindAllInPaths(ctx context.Context, p []string, limit int, offset int) ([]*models.Folder, error) { + ret := _m.Called(ctx, p, limit, offset) + + var r0 []*models.Folder + if rf, ok := ret.Get(0).(func(context.Context, []string, int, int) []*models.Folder); ok { + r0 = rf(ctx, p, limit, offset) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Folder) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []string, int, int) error); ok { + r1 = rf(ctx, p, limit, offset) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByParentFolderID provides a mock function with given fields: ctx, parentFolderID +func (_m *FolderReaderWriter) FindByParentFolderID(ctx context.Context, parentFolderID models.FolderID) ([]*models.Folder, error) { + ret := _m.Called(ctx, parentFolderID) + + var r0 []*models.Folder + if rf, ok := ret.Get(0).(func(context.Context, models.FolderID) []*models.Folder); ok { + r0 = rf(ctx, parentFolderID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Folder) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FolderID) error); ok { + r1 = rf(ctx, parentFolderID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByPath provides a mock function with given fields: ctx, path +func (_m *FolderReaderWriter) FindByPath(ctx context.Context, path string) (*models.Folder, error) { + ret := _m.Called(ctx, path) + + var r0 *models.Folder + if rf, ok := ret.Get(0).(func(context.Context, string) *models.Folder); ok { + r0 = rf(ctx, path) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.Folder) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { + r1 = rf(ctx, path) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByZipFileID 
provides a mock function with given fields: ctx, zipFileID +func (_m *FolderReaderWriter) FindByZipFileID(ctx context.Context, zipFileID models.FileID) ([]*models.Folder, error) { + ret := _m.Called(ctx, zipFileID) + + var r0 []*models.Folder + if rf, ok := ret.Get(0).(func(context.Context, models.FileID) []*models.Folder); ok { + r0 = rf(ctx, zipFileID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Folder) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FileID) error); ok { + r1 = rf(ctx, zipFileID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// Update provides a mock function with given fields: ctx, f +func (_m *FolderReaderWriter) Update(ctx context.Context, f *models.Folder) error { + ret := _m.Called(ctx, f) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, *models.Folder) error); ok { + r0 = rf(ctx, f) + } else { + r0 = ret.Error(0) + } + + return r0 +} diff --git a/pkg/models/mocks/GalleryReaderWriter.go b/pkg/models/mocks/GalleryReaderWriter.go index 1c0ddf95775..2b901466b19 100644 --- a/pkg/models/mocks/GalleryReaderWriter.go +++ b/pkg/models/mocks/GalleryReaderWriter.go @@ -5,10 +5,8 @@ package mocks import ( context "context" - file "github.com/stashapp/stash/pkg/file" - mock "github.com/stretchr/testify/mock" - models "github.com/stashapp/stash/pkg/models" + mock "github.com/stretchr/testify/mock" ) // GalleryReaderWriter is an autogenerated mock type for the GalleryReaderWriter type @@ -16,6 +14,41 @@ type GalleryReaderWriter struct { mock.Mock } +// AddFileID provides a mock function with given fields: ctx, id, fileID +func (_m *GalleryReaderWriter) AddFileID(ctx context.Context, id int, fileID models.FileID) error { + ret := _m.Called(ctx, id, fileID) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int, models.FileID) error); ok { + r0 = rf(ctx, id, fileID) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// AddImages provides a mock function 
with given fields: ctx, galleryID, imageIDs +func (_m *GalleryReaderWriter) AddImages(ctx context.Context, galleryID int, imageIDs ...int) error { + _va := make([]interface{}, len(imageIDs)) + for _i := range imageIDs { + _va[_i] = imageIDs[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, galleryID) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int, ...int) error); ok { + r0 = rf(ctx, galleryID, imageIDs...) + } else { + r0 = ret.Error(0) + } + + return r0 +} + // All provides a mock function with given fields: ctx func (_m *GalleryReaderWriter) All(ctx context.Context) ([]*models.Gallery, error) { ret := _m.Called(ctx) @@ -60,12 +93,33 @@ func (_m *GalleryReaderWriter) Count(ctx context.Context) (int, error) { return r0, r1 } +// CountByFileID provides a mock function with given fields: ctx, fileID +func (_m *GalleryReaderWriter) CountByFileID(ctx context.Context, fileID models.FileID) (int, error) { + ret := _m.Called(ctx, fileID) + + var r0 int + if rf, ok := ret.Get(0).(func(context.Context, models.FileID) int); ok { + r0 = rf(ctx, fileID) + } else { + r0 = ret.Get(0).(int) + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FileID) error); ok { + r1 = rf(ctx, fileID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // Create provides a mock function with given fields: ctx, newGallery, fileIDs -func (_m *GalleryReaderWriter) Create(ctx context.Context, newGallery *models.Gallery, fileIDs []file.ID) error { +func (_m *GalleryReaderWriter) Create(ctx context.Context, newGallery *models.Gallery, fileIDs []models.FileID) error { ret := _m.Called(ctx, newGallery, fileIDs) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *models.Gallery, []file.ID) error); ok { + if rf, ok := ret.Get(0).(func(context.Context, *models.Gallery, []models.FileID) error); ok { r0 = rf(ctx, newGallery, fileIDs) } else { r0 = ret.Error(0) @@ -157,6 +211,75 @@ 
func (_m *GalleryReaderWriter) FindByChecksums(ctx context.Context, checksums [] return r0, r1 } +// FindByFileID provides a mock function with given fields: ctx, fileID +func (_m *GalleryReaderWriter) FindByFileID(ctx context.Context, fileID models.FileID) ([]*models.Gallery, error) { + ret := _m.Called(ctx, fileID) + + var r0 []*models.Gallery + if rf, ok := ret.Get(0).(func(context.Context, models.FileID) []*models.Gallery); ok { + r0 = rf(ctx, fileID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Gallery) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FileID) error); ok { + r1 = rf(ctx, fileID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByFingerprints provides a mock function with given fields: ctx, fp +func (_m *GalleryReaderWriter) FindByFingerprints(ctx context.Context, fp []models.Fingerprint) ([]*models.Gallery, error) { + ret := _m.Called(ctx, fp) + + var r0 []*models.Gallery + if rf, ok := ret.Get(0).(func(context.Context, []models.Fingerprint) []*models.Gallery); ok { + r0 = rf(ctx, fp) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Gallery) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []models.Fingerprint) error); ok { + r1 = rf(ctx, fp) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByFolderID provides a mock function with given fields: ctx, folderID +func (_m *GalleryReaderWriter) FindByFolderID(ctx context.Context, folderID models.FolderID) ([]*models.Gallery, error) { + ret := _m.Called(ctx, folderID) + + var r0 []*models.Gallery + if rf, ok := ret.Get(0).(func(context.Context, models.FolderID) []*models.Gallery); ok { + r0 = rf(ctx, folderID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Gallery) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FolderID) error); ok { + r1 = rf(ctx, folderID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // 
FindByImageID provides a mock function with given fields: ctx, imageID func (_m *GalleryReaderWriter) FindByImageID(ctx context.Context, imageID int) ([]*models.Gallery, error) { ret := _m.Called(ctx, imageID) @@ -249,13 +372,59 @@ func (_m *GalleryReaderWriter) FindMany(ctx context.Context, ids []int) ([]*mode return r0, r1 } -// GetImageIDs provides a mock function with given fields: ctx, galleryID -func (_m *GalleryReaderWriter) GetImageIDs(ctx context.Context, galleryID int) ([]int, error) { - ret := _m.Called(ctx, galleryID) +// FindUserGalleryByTitle provides a mock function with given fields: ctx, title +func (_m *GalleryReaderWriter) FindUserGalleryByTitle(ctx context.Context, title string) ([]*models.Gallery, error) { + ret := _m.Called(ctx, title) + + var r0 []*models.Gallery + if rf, ok := ret.Get(0).(func(context.Context, string) []*models.Gallery); ok { + r0 = rf(ctx, title) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Gallery) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { + r1 = rf(ctx, title) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetFiles provides a mock function with given fields: ctx, relatedID +func (_m *GalleryReaderWriter) GetFiles(ctx context.Context, relatedID int) ([]models.File, error) { + ret := _m.Called(ctx, relatedID) + + var r0 []models.File + if rf, ok := ret.Get(0).(func(context.Context, int) []models.File); ok { + r0 = rf(ctx, relatedID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.File) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, relatedID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetImageIDs provides a mock function with given fields: ctx, relatedID +func (_m *GalleryReaderWriter) GetImageIDs(ctx context.Context, relatedID int) ([]int, error) { + ret := _m.Called(ctx, relatedID) var r0 []int if rf, ok := ret.Get(0).(func(context.Context, 
int) []int); ok { - r0 = rf(ctx, galleryID) + r0 = rf(ctx, relatedID) } else { if ret.Get(0) != nil { r0 = ret.Get(0).([]int) @@ -264,7 +433,30 @@ func (_m *GalleryReaderWriter) GetImageIDs(ctx context.Context, galleryID int) ( var r1 error if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { - r1 = rf(ctx, galleryID) + r1 = rf(ctx, relatedID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetManyFileIDs provides a mock function with given fields: ctx, ids +func (_m *GalleryReaderWriter) GetManyFileIDs(ctx context.Context, ids []int) ([][]models.FileID, error) { + ret := _m.Called(ctx, ids) + + var r0 [][]models.FileID + if rf, ok := ret.Get(0).(func(context.Context, []int) [][]models.FileID); ok { + r0 = rf(ctx, ids) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([][]models.FileID) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []int) error); ok { + r1 = rf(ctx, ids) } else { r1 = ret.Error(1) } @@ -392,6 +584,27 @@ func (_m *GalleryReaderWriter) QueryCount(ctx context.Context, galleryFilter *mo return r0, r1 } +// RemoveImages provides a mock function with given fields: ctx, galleryID, imageIDs +func (_m *GalleryReaderWriter) RemoveImages(ctx context.Context, galleryID int, imageIDs ...int) error { + _va := make([]interface{}, len(imageIDs)) + for _i := range imageIDs { + _va[_i] = imageIDs[_i] + } + var _ca []interface{} + _ca = append(_ca, ctx, galleryID) + _ca = append(_ca, _va...) + ret := _m.Called(_ca...) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int, ...int) error); ok { + r0 = rf(ctx, galleryID, imageIDs...) 
+ } else { + r0 = ret.Error(0) + } + + return r0 +} + // Update provides a mock function with given fields: ctx, updatedGallery func (_m *GalleryReaderWriter) Update(ctx context.Context, updatedGallery *models.Gallery) error { ret := _m.Called(ctx, updatedGallery) diff --git a/pkg/models/mocks/ImageReaderWriter.go b/pkg/models/mocks/ImageReaderWriter.go index f745f8afe27..4924fd51d11 100644 --- a/pkg/models/mocks/ImageReaderWriter.go +++ b/pkg/models/mocks/ImageReaderWriter.go @@ -14,6 +14,20 @@ type ImageReaderWriter struct { mock.Mock } +// AddFileID provides a mock function with given fields: ctx, id, fileID +func (_m *ImageReaderWriter) AddFileID(ctx context.Context, id int, fileID models.FileID) error { + ret := _m.Called(ctx, id, fileID) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int, models.FileID) error); ok { + r0 = rf(ctx, id, fileID) + } else { + r0 = ret.Error(0) + } + + return r0 +} + // All provides a mock function with given fields: ctx func (_m *ImageReaderWriter) All(ctx context.Context) ([]*models.Image, error) { ret := _m.Called(ctx) @@ -58,6 +72,27 @@ func (_m *ImageReaderWriter) Count(ctx context.Context) (int, error) { return r0, r1 } +// CountByFileID provides a mock function with given fields: ctx, fileID +func (_m *ImageReaderWriter) CountByFileID(ctx context.Context, fileID models.FileID) (int, error) { + ret := _m.Called(ctx, fileID) + + var r0 int + if rf, ok := ret.Get(0).(func(context.Context, models.FileID) int); ok { + r0 = rf(ctx, fileID) + } else { + r0 = ret.Get(0).(int) + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FileID) error); ok { + r1 = rf(ctx, fileID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // CountByGalleryID provides a mock function with given fields: ctx, galleryID func (_m *ImageReaderWriter) CountByGalleryID(ctx context.Context, galleryID int) (int, error) { ret := _m.Called(ctx, galleryID) @@ -174,6 +209,75 @@ func (_m *ImageReaderWriter) 
FindByChecksum(ctx context.Context, checksum string return r0, r1 } +// FindByFileID provides a mock function with given fields: ctx, fileID +func (_m *ImageReaderWriter) FindByFileID(ctx context.Context, fileID models.FileID) ([]*models.Image, error) { + ret := _m.Called(ctx, fileID) + + var r0 []*models.Image + if rf, ok := ret.Get(0).(func(context.Context, models.FileID) []*models.Image); ok { + r0 = rf(ctx, fileID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Image) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FileID) error); ok { + r1 = rf(ctx, fileID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByFingerprints provides a mock function with given fields: ctx, fp +func (_m *ImageReaderWriter) FindByFingerprints(ctx context.Context, fp []models.Fingerprint) ([]*models.Image, error) { + ret := _m.Called(ctx, fp) + + var r0 []*models.Image + if rf, ok := ret.Get(0).(func(context.Context, []models.Fingerprint) []*models.Image); ok { + r0 = rf(ctx, fp) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Image) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []models.Fingerprint) error); ok { + r1 = rf(ctx, fp) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByFolderID provides a mock function with given fields: ctx, fileID +func (_m *ImageReaderWriter) FindByFolderID(ctx context.Context, fileID models.FolderID) ([]*models.Image, error) { + ret := _m.Called(ctx, fileID) + + var r0 []*models.Image + if rf, ok := ret.Get(0).(func(context.Context, models.FolderID) []*models.Image); ok { + r0 = rf(ctx, fileID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Image) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FolderID) error); ok { + r1 = rf(ctx, fileID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // FindByGalleryID provides a mock function with given fields: ctx, galleryID 
func (_m *ImageReaderWriter) FindByGalleryID(ctx context.Context, galleryID int) ([]*models.Image, error) { ret := _m.Called(ctx, galleryID) @@ -197,6 +301,29 @@ func (_m *ImageReaderWriter) FindByGalleryID(ctx context.Context, galleryID int) return r0, r1 } +// FindByZipFileID provides a mock function with given fields: ctx, zipFileID +func (_m *ImageReaderWriter) FindByZipFileID(ctx context.Context, zipFileID models.FileID) ([]*models.Image, error) { + ret := _m.Called(ctx, zipFileID) + + var r0 []*models.Image + if rf, ok := ret.Get(0).(func(context.Context, models.FileID) []*models.Image); ok { + r0 = rf(ctx, zipFileID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Image) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FileID) error); ok { + r1 = rf(ctx, zipFileID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // FindMany provides a mock function with given fields: ctx, ids func (_m *ImageReaderWriter) FindMany(ctx context.Context, ids []int) ([]*models.Image, error) { ret := _m.Called(ctx, ids) @@ -220,6 +347,29 @@ func (_m *ImageReaderWriter) FindMany(ctx context.Context, ids []int) ([]*models return r0, r1 } +// GetFiles provides a mock function with given fields: ctx, relatedID +func (_m *ImageReaderWriter) GetFiles(ctx context.Context, relatedID int) ([]models.File, error) { + ret := _m.Called(ctx, relatedID) + + var r0 []models.File + if rf, ok := ret.Get(0).(func(context.Context, int) []models.File); ok { + r0 = rf(ctx, relatedID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.File) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, relatedID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // GetGalleryIDs provides a mock function with given fields: ctx, relatedID func (_m *ImageReaderWriter) GetGalleryIDs(ctx context.Context, relatedID int) ([]int, error) { ret := _m.Called(ctx, relatedID) @@ -243,6 +393,29 
@@ func (_m *ImageReaderWriter) GetGalleryIDs(ctx context.Context, relatedID int) ( return r0, r1 } +// GetManyFileIDs provides a mock function with given fields: ctx, ids +func (_m *ImageReaderWriter) GetManyFileIDs(ctx context.Context, ids []int) ([][]models.FileID, error) { + ret := _m.Called(ctx, ids) + + var r0 [][]models.FileID + if rf, ok := ret.Get(0).(func(context.Context, []int) [][]models.FileID); ok { + r0 = rf(ctx, ids) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([][]models.FileID) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []int) error); ok { + r1 = rf(ctx, ids) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // GetPerformerIDs provides a mock function with given fields: ctx, relatedID func (_m *ImageReaderWriter) GetPerformerIDs(ctx context.Context, relatedID int) ([]int, error) { ret := _m.Called(ctx, relatedID) @@ -453,3 +626,31 @@ func (_m *ImageReaderWriter) UpdatePartial(ctx context.Context, id int, partial return r0, r1 } + +// UpdatePerformers provides a mock function with given fields: ctx, imageID, performerIDs +func (_m *ImageReaderWriter) UpdatePerformers(ctx context.Context, imageID int, performerIDs []int) error { + ret := _m.Called(ctx, imageID, performerIDs) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int, []int) error); ok { + r0 = rf(ctx, imageID, performerIDs) + } else { + r0 = ret.Error(0) + } + + return r0 +} + +// UpdateTags provides a mock function with given fields: ctx, imageID, tagIDs +func (_m *ImageReaderWriter) UpdateTags(ctx context.Context, imageID int, tagIDs []int) error { + ret := _m.Called(ctx, imageID, tagIDs) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int, []int) error); ok { + r0 = rf(ctx, imageID, tagIDs) + } else { + r0 = ret.Error(0) + } + + return r0 +} diff --git a/pkg/models/mocks/SceneMarkerReaderWriter.go b/pkg/models/mocks/SceneMarkerReaderWriter.go index 2be3b118437..4b4fa6d2ce9 100644 --- 
a/pkg/models/mocks/SceneMarkerReaderWriter.go +++ b/pkg/models/mocks/SceneMarkerReaderWriter.go @@ -199,13 +199,13 @@ func (_m *SceneMarkerReaderWriter) GetMarkerStrings(ctx context.Context, q *stri return r0, r1 } -// GetTagIDs provides a mock function with given fields: ctx, imageID -func (_m *SceneMarkerReaderWriter) GetTagIDs(ctx context.Context, imageID int) ([]int, error) { - ret := _m.Called(ctx, imageID) +// GetTagIDs provides a mock function with given fields: ctx, relatedID +func (_m *SceneMarkerReaderWriter) GetTagIDs(ctx context.Context, relatedID int) ([]int, error) { + ret := _m.Called(ctx, relatedID) var r0 []int if rf, ok := ret.Get(0).(func(context.Context, int) []int); ok { - r0 = rf(ctx, imageID) + r0 = rf(ctx, relatedID) } else { if ret.Get(0) != nil { r0 = ret.Get(0).([]int) @@ -214,7 +214,7 @@ func (_m *SceneMarkerReaderWriter) GetTagIDs(ctx context.Context, imageID int) ( var r1 error if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { - r1 = rf(ctx, imageID) + r1 = rf(ctx, relatedID) } else { r1 = ret.Error(1) } diff --git a/pkg/models/mocks/SceneReaderWriter.go b/pkg/models/mocks/SceneReaderWriter.go index 8d7245ee9ea..93374587198 100644 --- a/pkg/models/mocks/SceneReaderWriter.go +++ b/pkg/models/mocks/SceneReaderWriter.go @@ -5,10 +5,8 @@ package mocks import ( context "context" - file "github.com/stashapp/stash/pkg/file" - mock "github.com/stretchr/testify/mock" - models "github.com/stashapp/stash/pkg/models" + mock "github.com/stretchr/testify/mock" ) // SceneReaderWriter is an autogenerated mock type for the SceneReaderWriter type @@ -16,6 +14,34 @@ type SceneReaderWriter struct { mock.Mock } +// AddFileID provides a mock function with given fields: ctx, id, fileID +func (_m *SceneReaderWriter) AddFileID(ctx context.Context, id int, fileID models.FileID) error { + ret := _m.Called(ctx, id, fileID) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int, models.FileID) error); ok { + r0 = rf(ctx, id, fileID) + 
} else { + r0 = ret.Error(0) + } + + return r0 +} + +// AddGalleryIDs provides a mock function with given fields: ctx, sceneID, galleryIDs +func (_m *SceneReaderWriter) AddGalleryIDs(ctx context.Context, sceneID int, galleryIDs []int) error { + ret := _m.Called(ctx, sceneID, galleryIDs) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int, []int) error); ok { + r0 = rf(ctx, sceneID, galleryIDs) + } else { + r0 = ret.Error(0) + } + + return r0 +} + // All provides a mock function with given fields: ctx func (_m *SceneReaderWriter) All(ctx context.Context) ([]*models.Scene, error) { ret := _m.Called(ctx) @@ -39,6 +65,20 @@ func (_m *SceneReaderWriter) All(ctx context.Context) ([]*models.Scene, error) { return r0, r1 } +// AssignFiles provides a mock function with given fields: ctx, sceneID, fileID +func (_m *SceneReaderWriter) AssignFiles(ctx context.Context, sceneID int, fileID []models.FileID) error { + ret := _m.Called(ctx, sceneID, fileID) + + var r0 error + if rf, ok := ret.Get(0).(func(context.Context, int, []models.FileID) error); ok { + r0 = rf(ctx, sceneID, fileID) + } else { + r0 = ret.Error(0) + } + + return r0 +} + // Count provides a mock function with given fields: ctx func (_m *SceneReaderWriter) Count(ctx context.Context) (int, error) { ret := _m.Called(ctx) @@ -60,6 +100,27 @@ func (_m *SceneReaderWriter) Count(ctx context.Context) (int, error) { return r0, r1 } +// CountByFileID provides a mock function with given fields: ctx, fileID +func (_m *SceneReaderWriter) CountByFileID(ctx context.Context, fileID models.FileID) (int, error) { + ret := _m.Called(ctx, fileID) + + var r0 int + if rf, ok := ret.Get(0).(func(context.Context, models.FileID) int); ok { + r0 = rf(ctx, fileID) + } else { + r0 = ret.Get(0).(int) + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FileID) error); ok { + r1 = rf(ctx, fileID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // CountByMovieID provides a mock function with 
given fields: ctx, movieID func (_m *SceneReaderWriter) CountByMovieID(ctx context.Context, movieID int) (int, error) { ret := _m.Called(ctx, movieID) @@ -187,11 +248,11 @@ func (_m *SceneReaderWriter) CountMissingOSHash(ctx context.Context) (int, error } // Create provides a mock function with given fields: ctx, newScene, fileIDs -func (_m *SceneReaderWriter) Create(ctx context.Context, newScene *models.Scene, fileIDs []file.ID) error { +func (_m *SceneReaderWriter) Create(ctx context.Context, newScene *models.Scene, fileIDs []models.FileID) error { ret := _m.Called(ctx, newScene, fileIDs) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *models.Scene, []file.ID) error); ok { + if rf, ok := ret.Get(0).(func(context.Context, *models.Scene, []models.FileID) error); ok { r0 = rf(ctx, newScene, fileIDs) } else { r0 = ret.Error(0) @@ -302,6 +363,52 @@ func (_m *SceneReaderWriter) FindByChecksum(ctx context.Context, checksum string return r0, r1 } +// FindByFileID provides a mock function with given fields: ctx, fileID +func (_m *SceneReaderWriter) FindByFileID(ctx context.Context, fileID models.FileID) ([]*models.Scene, error) { + ret := _m.Called(ctx, fileID) + + var r0 []*models.Scene + if rf, ok := ret.Get(0).(func(context.Context, models.FileID) []*models.Scene); ok { + r0 = rf(ctx, fileID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Scene) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FileID) error); ok { + r1 = rf(ctx, fileID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// FindByFingerprints provides a mock function with given fields: ctx, fp +func (_m *SceneReaderWriter) FindByFingerprints(ctx context.Context, fp []models.Fingerprint) ([]*models.Scene, error) { + ret := _m.Called(ctx, fp) + + var r0 []*models.Scene + if rf, ok := ret.Get(0).(func(context.Context, []models.Fingerprint) []*models.Scene); ok { + r0 = rf(ctx, fp) + } else { + if ret.Get(0) != nil { + r0 = 
ret.Get(0).([]*models.Scene) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []models.Fingerprint) error); ok { + r1 = rf(ctx, fp) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // FindByGalleryID provides a mock function with given fields: ctx, performerID func (_m *SceneReaderWriter) FindByGalleryID(ctx context.Context, performerID int) ([]*models.Scene, error) { ret := _m.Called(ctx, performerID) @@ -417,6 +524,29 @@ func (_m *SceneReaderWriter) FindByPerformerID(ctx context.Context, performerID return r0, r1 } +// FindByPrimaryFileID provides a mock function with given fields: ctx, fileID +func (_m *SceneReaderWriter) FindByPrimaryFileID(ctx context.Context, fileID models.FileID) ([]*models.Scene, error) { + ret := _m.Called(ctx, fileID) + + var r0 []*models.Scene + if rf, ok := ret.Get(0).(func(context.Context, models.FileID) []*models.Scene); ok { + r0 = rf(ctx, fileID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]*models.Scene) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, models.FileID) error); ok { + r1 = rf(ctx, fileID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // FindDuplicates provides a mock function with given fields: ctx, distance, durationDiff func (_m *SceneReaderWriter) FindDuplicates(ctx context.Context, distance int, durationDiff float64) ([][]*models.Scene, error) { ret := _m.Called(ctx, distance, durationDiff) @@ -487,15 +617,15 @@ func (_m *SceneReaderWriter) GetCover(ctx context.Context, sceneID int) ([]byte, } // GetFiles provides a mock function with given fields: ctx, relatedID -func (_m *SceneReaderWriter) GetFiles(ctx context.Context, relatedID int) ([]*file.VideoFile, error) { +func (_m *SceneReaderWriter) GetFiles(ctx context.Context, relatedID int) ([]*models.VideoFile, error) { ret := _m.Called(ctx, relatedID) - var r0 []*file.VideoFile - if rf, ok := ret.Get(0).(func(context.Context, int) []*file.VideoFile); ok { + var r0 
[]*models.VideoFile + if rf, ok := ret.Get(0).(func(context.Context, int) []*models.VideoFile); ok { r0 = rf(ctx, relatedID) } else { if ret.Get(0) != nil { - r0 = ret.Get(0).([]*file.VideoFile) + r0 = ret.Get(0).([]*models.VideoFile) } } @@ -532,6 +662,29 @@ func (_m *SceneReaderWriter) GetGalleryIDs(ctx context.Context, relatedID int) ( return r0, r1 } +// GetManyFileIDs provides a mock function with given fields: ctx, ids +func (_m *SceneReaderWriter) GetManyFileIDs(ctx context.Context, ids []int) ([][]models.FileID, error) { + ret := _m.Called(ctx, ids) + + var r0 [][]models.FileID + if rf, ok := ret.Get(0).(func(context.Context, []int) [][]models.FileID); ok { + r0 = rf(ctx, ids) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([][]models.FileID) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []int) error); ok { + r1 = rf(ctx, ids) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // GetMovies provides a mock function with given fields: ctx, id func (_m *SceneReaderWriter) GetMovies(ctx context.Context, id int) ([]models.MoviesScenes, error) { ret := _m.Called(ctx, id) @@ -689,20 +842,20 @@ func (_m *SceneReaderWriter) IncrementOCounter(ctx context.Context, id int) (int return r0, r1 } -// IncrementWatchCount provides a mock function with given fields: ctx, id -func (_m *SceneReaderWriter) IncrementWatchCount(ctx context.Context, id int) (int, error) { - ret := _m.Called(ctx, id) +// IncrementWatchCount provides a mock function with given fields: ctx, sceneID +func (_m *SceneReaderWriter) IncrementWatchCount(ctx context.Context, sceneID int) (int, error) { + ret := _m.Called(ctx, sceneID) var r0 int if rf, ok := ret.Get(0).(func(context.Context, int) int); ok { - r0 = rf(ctx, id) + r0 = rf(ctx, sceneID) } else { r0 = ret.Get(0).(int) } var r1 error if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { - r1 = rf(ctx, id) + r1 = rf(ctx, sceneID) } else { r1 = ret.Error(1) } @@ -859,20 +1012,20 @@ func (_m 
*SceneReaderWriter) ResetOCounter(ctx context.Context, id int) (int, er return r0, r1 } -// SaveActivity provides a mock function with given fields: ctx, id, resumeTime, playDuration -func (_m *SceneReaderWriter) SaveActivity(ctx context.Context, id int, resumeTime *float64, playDuration *float64) (bool, error) { - ret := _m.Called(ctx, id, resumeTime, playDuration) +// SaveActivity provides a mock function with given fields: ctx, sceneID, resumeTime, playDuration +func (_m *SceneReaderWriter) SaveActivity(ctx context.Context, sceneID int, resumeTime *float64, playDuration *float64) (bool, error) { + ret := _m.Called(ctx, sceneID, resumeTime, playDuration) var r0 bool if rf, ok := ret.Get(0).(func(context.Context, int, *float64, *float64) bool); ok { - r0 = rf(ctx, id, resumeTime, playDuration) + r0 = rf(ctx, sceneID, resumeTime, playDuration) } else { r0 = ret.Get(0).(bool) } var r1 error if rf, ok := ret.Get(1).(func(context.Context, int, *float64, *float64) error); ok { - r1 = rf(ctx, id, resumeTime, playDuration) + r1 = rf(ctx, sceneID, resumeTime, playDuration) } else { r1 = ret.Error(1) } diff --git a/pkg/models/mocks/StudioReaderWriter.go b/pkg/models/mocks/StudioReaderWriter.go index 56fd6200db7..c46e45d4c24 100644 --- a/pkg/models/mocks/StudioReaderWriter.go +++ b/pkg/models/mocks/StudioReaderWriter.go @@ -58,13 +58,13 @@ func (_m *StudioReaderWriter) Count(ctx context.Context) (int, error) { return r0, r1 } -// Create provides a mock function with given fields: ctx, input -func (_m *StudioReaderWriter) Create(ctx context.Context, input *models.Studio) error { - ret := _m.Called(ctx, input) +// Create provides a mock function with given fields: ctx, newStudio +func (_m *StudioReaderWriter) Create(ctx context.Context, newStudio *models.Studio) error { + ret := _m.Called(ctx, newStudio) var r0 error if rf, ok := ret.Get(0).(func(context.Context, *models.Studio) error); ok { - r0 = rf(ctx, input) + r0 = rf(ctx, newStudio) } else { r0 = ret.Error(0) } @@ 
-132,6 +132,29 @@ func (_m *StudioReaderWriter) FindByName(ctx context.Context, name string, nocas return r0, r1 } +// FindBySceneID provides a mock function with given fields: ctx, sceneID +func (_m *StudioReaderWriter) FindBySceneID(ctx context.Context, sceneID int) (*models.Studio, error) { + ret := _m.Called(ctx, sceneID) + + var r0 *models.Studio + if rf, ok := ret.Get(0).(func(context.Context, int) *models.Studio); ok { + r0 = rf(ctx, sceneID) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(*models.Studio) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, sceneID) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // FindByStashID provides a mock function with given fields: ctx, stashID func (_m *StudioReaderWriter) FindByStashID(ctx context.Context, stashID models.StashID) ([]*models.Studio, error) { ret := _m.Called(ctx, stashID) @@ -395,13 +418,13 @@ func (_m *StudioReaderWriter) UpdateImage(ctx context.Context, studioID int, ima return r0 } -// UpdatePartial provides a mock function with given fields: ctx, input -func (_m *StudioReaderWriter) UpdatePartial(ctx context.Context, input models.StudioPartial) (*models.Studio, error) { - ret := _m.Called(ctx, input) +// UpdatePartial provides a mock function with given fields: ctx, updatedStudio +func (_m *StudioReaderWriter) UpdatePartial(ctx context.Context, updatedStudio models.StudioPartial) (*models.Studio, error) { + ret := _m.Called(ctx, updatedStudio) var r0 *models.Studio if rf, ok := ret.Get(0).(func(context.Context, models.StudioPartial) *models.Studio); ok { - r0 = rf(ctx, input) + r0 = rf(ctx, updatedStudio) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*models.Studio) @@ -410,7 +433,7 @@ func (_m *StudioReaderWriter) UpdatePartial(ctx context.Context, input models.St var r1 error if rf, ok := ret.Get(1).(func(context.Context, models.StudioPartial) error); ok { - r1 = rf(ctx, input) + r1 = rf(ctx, updatedStudio) } else { r1 = 
ret.Error(1) } diff --git a/pkg/models/mocks/TagReaderWriter.go b/pkg/models/mocks/TagReaderWriter.go index 680c78c46c6..9b610e49b6e 100644 --- a/pkg/models/mocks/TagReaderWriter.go +++ b/pkg/models/mocks/TagReaderWriter.go @@ -427,13 +427,13 @@ func (_m *TagReaderWriter) FindMany(ctx context.Context, ids []int) ([]*models.T return r0, r1 } -// GetAliases provides a mock function with given fields: ctx, tagID -func (_m *TagReaderWriter) GetAliases(ctx context.Context, tagID int) ([]string, error) { - ret := _m.Called(ctx, tagID) +// GetAliases provides a mock function with given fields: ctx, relatedID +func (_m *TagReaderWriter) GetAliases(ctx context.Context, relatedID int) ([]string, error) { + ret := _m.Called(ctx, relatedID) var r0 []string if rf, ok := ret.Get(0).(func(context.Context, int) []string); ok { - r0 = rf(ctx, tagID) + r0 = rf(ctx, relatedID) } else { if ret.Get(0) != nil { r0 = ret.Get(0).([]string) @@ -442,7 +442,7 @@ func (_m *TagReaderWriter) GetAliases(ctx context.Context, tagID int) ([]string, var r1 error if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { - r1 = rf(ctx, tagID) + r1 = rf(ctx, relatedID) } else { r1 = ret.Error(1) } diff --git a/pkg/models/mocks/query.go b/pkg/models/mocks/query.go index 346bd1e5557..dd35d0f868f 100644 --- a/pkg/models/mocks/query.go +++ b/pkg/models/mocks/query.go @@ -31,6 +31,10 @@ type imageResolver struct { images []*models.Image } +func (s *imageResolver) Find(ctx context.Context, id int) (*models.Image, error) { + panic("not implemented") +} + func (s *imageResolver) FindMany(ctx context.Context, ids []int) ([]*models.Image, error) { return s.images, nil } diff --git a/pkg/models/model_file.go b/pkg/models/model_file.go index 4e8ddbef8f6..b4ca2c3c1e8 100644 --- a/pkg/models/model_file.go +++ b/pkg/models/model_file.go @@ -1,9 +1,14 @@ package models import ( + "bytes" "fmt" "io" + "io/fs" + "math" + "net/http" "strconv" + "time" ) type HashAlgorithm string @@ -47,3 +52,244 @@ func (e 
*HashAlgorithm) UnmarshalGQL(v interface{}) error { func (e HashAlgorithm) MarshalGQL(w io.Writer) { fmt.Fprint(w, strconv.Quote(e.String())) } + +// ID represents an ID of a file. +type FileID int32 + +func (i FileID) String() string { + return strconv.Itoa(int(i)) +} + +func (i *FileID) UnmarshalGQL(v interface{}) (err error) { + switch v := v.(type) { + case string: + var id int + id, err = strconv.Atoi(v) + *i = FileID(id) + return err + case int: + *i = FileID(v) + return nil + default: + return fmt.Errorf("%T is not an int", v) + } +} + +func (i FileID) MarshalGQL(w io.Writer) { + fmt.Fprint(w, strconv.Quote(i.String())) +} + +// DirEntry represents a file or directory in the file system. +type DirEntry struct { + ZipFileID *FileID `json:"zip_file_id"` + + // transient - not persisted + // only guaranteed to have id, path and basename set + ZipFile File + + ModTime time.Time `json:"mod_time"` +} + +func (e *DirEntry) info(fs FS, path string) (fs.FileInfo, error) { + if e.ZipFile != nil { + zipPath := e.ZipFile.Base().Path + zfs, err := fs.OpenZip(zipPath) + if err != nil { + return nil, err + } + defer zfs.Close() + fs = zfs + } + // else assume os file + + ret, err := fs.Lstat(path) + return ret, err +} + +// File represents a file in the file system. +type File interface { + Base() *BaseFile + SetFingerprints(fp Fingerprints) + Open(fs FS) (io.ReadCloser, error) +} + +// BaseFile represents a file in the file system. 
+type BaseFile struct { + ID FileID `json:"id"` + + DirEntry + + // resolved from parent folder and basename only - not stored in DB + Path string `json:"path"` + + Basename string `json:"basename"` + ParentFolderID FolderID `json:"parent_folder_id"` + + Fingerprints Fingerprints `json:"fingerprints"` + + Size int64 `json:"size"` + + CreatedAt time.Time `json:"created_at"` + UpdatedAt time.Time `json:"updated_at"` +} + +func (f *BaseFile) FingerprintSlice() []Fingerprint { + return f.Fingerprints +} + +// SetFingerprints sets the fingerprints of the file. +// If a fingerprint of the same type already exists, it is overwritten. +func (f *BaseFile) SetFingerprints(fp Fingerprints) { + for _, v := range fp { + f.SetFingerprint(v) + } +} + +// SetFingerprint sets the fingerprint of the file. +// If a fingerprint of the same type already exists, it is overwritten. +func (f *BaseFile) SetFingerprint(fp Fingerprint) { + for i, existing := range f.Fingerprints { + if existing.Type == fp.Type { + f.Fingerprints[i] = fp + return + } + } + + f.Fingerprints = append(f.Fingerprints, fp) +} + +// Base is used to fulfil the File interface. 
+func (f *BaseFile) Base() *BaseFile { + return f +} + +func (f *BaseFile) Open(fs FS) (io.ReadCloser, error) { + if f.ZipFile != nil { + zipPath := f.ZipFile.Base().Path + zfs, err := fs.OpenZip(zipPath) + if err != nil { + return nil, err + } + + return zfs.OpenOnly(f.Path) + } + + return fs.Open(f.Path) +} + +func (f *BaseFile) Info(fs FS) (fs.FileInfo, error) { + return f.info(fs, f.Path) +} + +func (f *BaseFile) Serve(fs FS, w http.ResponseWriter, r *http.Request) error { + reader, err := f.Open(fs) + if err != nil { + return err + } + + defer reader.Close() + + content, ok := reader.(io.ReadSeeker) + if !ok { + data, err := io.ReadAll(reader) + if err != nil { + return err + } + content = bytes.NewReader(data) + } + + if r.URL.Query().Has("t") { + w.Header().Set("Cache-Control", "private, max-age=31536000, immutable") + } else { + w.Header().Set("Cache-Control", "no-cache") + } + http.ServeContent(w, r, f.Basename, f.ModTime, content) + + return nil +} + +// VisualFile is an interface for files that have a width and height. +type VisualFile interface { + File + GetWidth() int + GetHeight() int + GetFormat() string +} + +func GetMinResolution(f VisualFile) int { + w := f.GetWidth() + h := f.GetHeight() + + if w < h { + return w + } + + return h +} + +// ImageFile is an extension of BaseFile to represent image files. +type ImageFile struct { + *BaseFile + Format string `json:"format"` + Width int `json:"width"` + Height int `json:"height"` +} + +func (f ImageFile) GetWidth() int { + return f.Width +} + +func (f ImageFile) GetHeight() int { + return f.Height +} + +func (f ImageFile) GetFormat() string { + return f.Format +} + +// VideoFile is an extension of BaseFile to represent video files. 
+type VideoFile struct { + *BaseFile + Format string `json:"format"` + Width int `json:"width"` + Height int `json:"height"` + Duration float64 `json:"duration"` + VideoCodec string `json:"video_codec"` + AudioCodec string `json:"audio_codec"` + FrameRate float64 `json:"frame_rate"` + BitRate int64 `json:"bitrate"` + + Interactive bool `json:"interactive"` + InteractiveSpeed *int `json:"interactive_speed"` +} + +func (f VideoFile) GetWidth() int { + return f.Width +} + +func (f VideoFile) GetHeight() int { + return f.Height +} + +func (f VideoFile) GetFormat() string { + return f.Format +} + +// #1572 - Inf and NaN values cause the JSON marshaller to fail +// Replace these values with 0 rather than erroring + +func (f VideoFile) DurationFinite() float64 { + ret := f.Duration + if math.IsInf(ret, 0) || math.IsNaN(ret) { + return 0 + } + return ret +} + +func (f VideoFile) FrameRateFinite() float64 { + ret := f.FrameRate + if math.IsInf(ret, 0) || math.IsNaN(ret) { + return 0 + } + return ret +} diff --git a/pkg/models/model_folder.go b/pkg/models/model_folder.go new file mode 100644 index 00000000000..590cdd7bd30 --- /dev/null +++ b/pkg/models/model_folder.go @@ -0,0 +1,51 @@ +package models + +import ( + "fmt" + "io" + "io/fs" + "strconv" + "time" +) + +// FolderID represents an ID of a folder. +type FolderID int32 + +// String converts the ID to a string. +func (i FolderID) String() string { + return strconv.Itoa(int(i)) +} + +func (i *FolderID) UnmarshalGQL(v interface{}) (err error) { + switch v := v.(type) { + case string: + var id int + id, err = strconv.Atoi(v) + *i = FolderID(id) + return err + case int: + *i = FolderID(v) + return nil + default: + return fmt.Errorf("%T is not an int", v) + } +} + +func (i FolderID) MarshalGQL(w io.Writer) { + fmt.Fprint(w, strconv.Quote(i.String())) +} + +// Folder represents a folder in the file system. 
+type Folder struct { + ID FolderID `json:"id"` + DirEntry + Path string `json:"path"` + ParentFolderID *FolderID `json:"parent_folder_id"` + + CreatedAt time.Time `json:"created_at"` + UpdatedAt time.Time `json:"updated_at"` +} + +func (f *Folder) Info(fs FS) (fs.FileInfo, error) { + return f.info(fs, f.Path) +} diff --git a/pkg/models/model_gallery.go b/pkg/models/model_gallery.go index 932d5cd17bc..8f563f06f7b 100644 --- a/pkg/models/model_gallery.go +++ b/pkg/models/model_gallery.go @@ -5,8 +5,6 @@ import ( "path/filepath" "strconv" "time" - - "github.com/stashapp/stash/pkg/file" ) type Gallery struct { @@ -24,11 +22,11 @@ type Gallery struct { // transient - not persisted Files RelatedFiles // transient - not persisted - PrimaryFileID *file.ID + PrimaryFileID *FileID // transient - path of primary file or folder Path string - FolderID *file.FolderID `json:"folder_id"` + FolderID *FolderID `json:"folder_id"` CreatedAt time.Time `json:"created_at"` UpdatedAt time.Time `json:"updated_at"` @@ -45,13 +43,13 @@ func (g *Gallery) IsUserCreated() bool { } func (g *Gallery) LoadFiles(ctx context.Context, l FileLoader) error { - return g.Files.load(func() ([]file.File, error) { + return g.Files.load(func() ([]File, error) { return l.GetFiles(ctx, g.ID) }) } -func (g *Gallery) LoadPrimaryFile(ctx context.Context, l file.Finder) error { - return g.Files.loadPrimary(func() (file.File, error) { +func (g *Gallery) LoadPrimaryFile(ctx context.Context, l FileGetter) error { + return g.Files.loadPrimary(func() (File, error) { if g.PrimaryFileID == nil { return nil, nil } @@ -89,7 +87,7 @@ func (g *Gallery) LoadTagIDs(ctx context.Context, l TagIDLoader) error { func (g Gallery) PrimaryChecksum() string { // renamed from Checksum to prevent gqlgen from using it in the resolver if p := g.Files.Primary(); p != nil { - v := p.Base().Fingerprints.Get(file.FingerprintTypeMD5) + v := p.Base().Fingerprints.Get(FingerprintTypeMD5) if v == nil { return "" } @@ -120,7 +118,7 @@ type 
GalleryPartial struct { SceneIDs *UpdateIDs TagIDs *UpdateIDs PerformerIDs *UpdateIDs - PrimaryFileID *file.ID + PrimaryFileID *FileID } func NewGalleryPartial() GalleryPartial { diff --git a/pkg/models/model_image.go b/pkg/models/model_image.go index e025ba0b174..9e0a0389a77 100644 --- a/pkg/models/model_image.go +++ b/pkg/models/model_image.go @@ -5,8 +5,6 @@ import ( "path/filepath" "strconv" "time" - - "github.com/stashapp/stash/pkg/file" ) // Image stores the metadata for a single image. @@ -24,7 +22,7 @@ type Image struct { // transient - not persisted Files RelatedFiles - PrimaryFileID *file.ID + PrimaryFileID *FileID // transient - path of primary file - empty if no files Path string // transient - checksum of primary file - empty if no files @@ -39,13 +37,13 @@ type Image struct { } func (i *Image) LoadFiles(ctx context.Context, l FileLoader) error { - return i.Files.load(func() ([]file.File, error) { + return i.Files.load(func() ([]File, error) { return l.GetFiles(ctx, i.ID) }) } -func (i *Image) LoadPrimaryFile(ctx context.Context, l file.Finder) error { - return i.Files.loadPrimary(func() (file.File, error) { +func (i *Image) LoadPrimaryFile(ctx context.Context, l FileGetter) error { + return i.Files.loadPrimary(func() (File, error) { if i.PrimaryFileID == nil { return nil, nil } @@ -107,7 +105,7 @@ func (i Image) DisplayName() string { type ImageCreateInput struct { *Image - FileIDs []file.ID + FileIDs []FileID } type ImagePartial struct { @@ -125,7 +123,7 @@ type ImagePartial struct { GalleryIDs *UpdateIDs TagIDs *UpdateIDs PerformerIDs *UpdateIDs - PrimaryFileID *file.ID + PrimaryFileID *FileID } func NewImagePartial() ImagePartial { diff --git a/pkg/models/model_saved_filter.go b/pkg/models/model_saved_filter.go index 23f06e2600e..51c50be51d1 100644 --- a/pkg/models/model_saved_filter.go +++ b/pkg/models/model_saved_filter.go @@ -60,11 +60,12 @@ func (e FilterMode) MarshalGQL(w io.Writer) { } type SavedFilter struct { - ID int `json:"id"` - Mode 
FilterMode `json:"mode"` - Name string `json:"name"` - // JSON-encoded filter string - Filter string `json:"filter"` + ID int `db:"id" json:"id"` + Mode FilterMode `db:"mode" json:"mode"` + Name string `db:"name" json:"name"` + FindFilter *FindFilterType `json:"find_filter"` + ObjectFilter map[string]interface{} `json:"object_filter"` + UIOptions map[string]interface{} `json:"ui_options"` } type SavedFilters []*SavedFilter diff --git a/pkg/models/model_scene.go b/pkg/models/model_scene.go index f19113f499a..eadbaab3a34 100644 --- a/pkg/models/model_scene.go +++ b/pkg/models/model_scene.go @@ -6,8 +6,6 @@ import ( "path/filepath" "strconv" "time" - - "github.com/stashapp/stash/pkg/file" ) // Scene stores the metadata for a single video scene. @@ -26,7 +24,7 @@ type Scene struct { // transient - not persisted Files RelatedVideoFiles - PrimaryFileID *file.ID + PrimaryFileID *FileID // transient - path of primary file - empty if no files Path string // transient - oshash of primary file - empty if no files @@ -57,13 +55,13 @@ func (s *Scene) LoadURLs(ctx context.Context, l URLLoader) error { } func (s *Scene) LoadFiles(ctx context.Context, l VideoFileLoader) error { - return s.Files.load(func() ([]*file.VideoFile, error) { + return s.Files.load(func() ([]*VideoFile, error) { return l.GetFiles(ctx, s.ID) }) } -func (s *Scene) LoadPrimaryFile(ctx context.Context, l file.Finder) error { - return s.Files.loadPrimary(func() (*file.VideoFile, error) { +func (s *Scene) LoadPrimaryFile(ctx context.Context, l FileGetter) error { + return s.Files.loadPrimary(func() (*VideoFile, error) { if s.PrimaryFileID == nil { return nil, nil } @@ -73,10 +71,10 @@ func (s *Scene) LoadPrimaryFile(ctx context.Context, l file.Finder) error { return nil, err } - var vf *file.VideoFile + var vf *VideoFile if len(f) > 0 { var ok bool - vf, ok = f[0].(*file.VideoFile) + vf, ok = f[0].(*VideoFile) if !ok { return nil, errors.New("not a video file") } @@ -173,7 +171,7 @@ type ScenePartial struct { 
PerformerIDs *UpdateIDs MovieIDs *UpdateMovieIDs StashIDs *UpdateStashIDs - PrimaryFileID *file.ID + PrimaryFileID *FileID } func NewScenePartial() ScenePartial { diff --git a/pkg/models/movie.go b/pkg/models/movie.go index d00b3f49106..be7effad376 100644 --- a/pkg/models/movie.go +++ b/pkg/models/movie.go @@ -1,7 +1,5 @@ package models -import "context" - type MovieFilterType struct { Name *StringCriterionInput `json:"name"` Director *StringCriterionInput `json:"director"` @@ -27,37 +25,3 @@ type MovieFilterType struct { // Filter by updated at UpdatedAt *TimestampCriterionInput `json:"updated_at"` } - -type MovieReader interface { - Find(ctx context.Context, id int) (*Movie, error) - FindMany(ctx context.Context, ids []int) ([]*Movie, error) - // FindBySceneID(sceneID int) ([]*Movie, error) - FindByName(ctx context.Context, name string, nocase bool) (*Movie, error) - FindByNames(ctx context.Context, names []string, nocase bool) ([]*Movie, error) - All(ctx context.Context) ([]*Movie, error) - Count(ctx context.Context) (int, error) - Query(ctx context.Context, movieFilter *MovieFilterType, findFilter *FindFilterType) ([]*Movie, int, error) - QueryCount(ctx context.Context, movieFilter *MovieFilterType, findFilter *FindFilterType) (int, error) - GetFrontImage(ctx context.Context, movieID int) ([]byte, error) - HasFrontImage(ctx context.Context, movieID int) (bool, error) - GetBackImage(ctx context.Context, movieID int) ([]byte, error) - HasBackImage(ctx context.Context, movieID int) (bool, error) - FindByPerformerID(ctx context.Context, performerID int) ([]*Movie, error) - CountByPerformerID(ctx context.Context, performerID int) (int, error) - FindByStudioID(ctx context.Context, studioID int) ([]*Movie, error) - CountByStudioID(ctx context.Context, studioID int) (int, error) -} - -type MovieWriter interface { - Create(ctx context.Context, newMovie *Movie) error - UpdatePartial(ctx context.Context, id int, updatedMovie MoviePartial) (*Movie, error) - Update(ctx 
context.Context, updatedMovie *Movie) error - Destroy(ctx context.Context, id int) error - UpdateFrontImage(ctx context.Context, movieID int, frontImage []byte) error - UpdateBackImage(ctx context.Context, movieID int, backImage []byte) error -} - -type MovieReaderWriter interface { - MovieReader - MovieWriter -} diff --git a/pkg/models/performer.go b/pkg/models/performer.go index 78d0a8995d0..752f1ce08e2 100644 --- a/pkg/models/performer.go +++ b/pkg/models/performer.go @@ -1,7 +1,6 @@ package models import ( - "context" "fmt" "io" "strconv" @@ -193,44 +192,3 @@ type PerformerFilterType struct { // Filter by updated at UpdatedAt *TimestampCriterionInput `json:"updated_at"` } - -type PerformerFinder interface { - FindMany(ctx context.Context, ids []int) ([]*Performer, error) -} - -type PerformerReader interface { - Find(ctx context.Context, id int) (*Performer, error) - PerformerFinder - FindBySceneID(ctx context.Context, sceneID int) ([]*Performer, error) - FindByImageID(ctx context.Context, imageID int) ([]*Performer, error) - FindByGalleryID(ctx context.Context, galleryID int) ([]*Performer, error) - FindByNames(ctx context.Context, names []string, nocase bool) ([]*Performer, error) - FindByStashID(ctx context.Context, stashID StashID) ([]*Performer, error) - FindByStashIDStatus(ctx context.Context, hasStashID bool, stashboxEndpoint string) ([]*Performer, error) - CountByTagID(ctx context.Context, tagID int) (int, error) - Count(ctx context.Context) (int, error) - All(ctx context.Context) ([]*Performer, error) - // TODO - this interface is temporary until the filter schema can fully - // support the query needed - QueryForAutoTag(ctx context.Context, words []string) ([]*Performer, error) - Query(ctx context.Context, performerFilter *PerformerFilterType, findFilter *FindFilterType) ([]*Performer, int, error) - QueryCount(ctx context.Context, galleryFilter *PerformerFilterType, findFilter *FindFilterType) (int, error) - AliasLoader - GetImage(ctx context.Context, 
performerID int) ([]byte, error) - HasImage(ctx context.Context, performerID int) (bool, error) - StashIDLoader - TagIDLoader -} - -type PerformerWriter interface { - Create(ctx context.Context, newPerformer *Performer) error - UpdatePartial(ctx context.Context, id int, updatedPerformer PerformerPartial) (*Performer, error) - Update(ctx context.Context, updatedPerformer *Performer) error - Destroy(ctx context.Context, id int) error - UpdateImage(ctx context.Context, performerID int, image []byte) error -} - -type PerformerReaderWriter interface { - PerformerReader - PerformerWriter -} diff --git a/pkg/models/relationships.go b/pkg/models/relationships.go index f59e7d92e06..2e6f07708a0 100644 --- a/pkg/models/relationships.go +++ b/pkg/models/relationships.go @@ -1,15 +1,15 @@ package models -import ( - "context" - - "github.com/stashapp/stash/pkg/file" -) +import "context" type SceneIDLoader interface { GetSceneIDs(ctx context.Context, relatedID int) ([]int, error) } +type ImageIDLoader interface { + GetImageIDs(ctx context.Context, relatedID int) ([]int, error) +} + type GalleryIDLoader interface { GetGalleryIDs(ctx context.Context, relatedID int) ([]int, error) } @@ -22,6 +22,10 @@ type TagIDLoader interface { GetTagIDs(ctx context.Context, relatedID int) ([]int, error) } +type FileIDLoader interface { + GetManyFileIDs(ctx context.Context, ids []int) ([][]FileID, error) +} + type SceneMovieLoader interface { GetMovies(ctx context.Context, id int) ([]MoviesScenes, error) } @@ -31,11 +35,11 @@ type StashIDLoader interface { } type VideoFileLoader interface { - GetFiles(ctx context.Context, relatedID int) ([]*file.VideoFile, error) + GetFiles(ctx context.Context, relatedID int) ([]*VideoFile, error) } type FileLoader interface { - GetFiles(ctx context.Context, relatedID int) ([]file.File, error) + GetFiles(ctx context.Context, relatedID int) ([]File, error) } type AliasLoader interface { @@ -224,12 +228,12 @@ func (r *RelatedStashIDs) load(fn func() ([]StashID, 
error)) error { } type RelatedVideoFiles struct { - primaryFile *file.VideoFile - files []*file.VideoFile + primaryFile *VideoFile + files []*VideoFile primaryLoaded bool } -func NewRelatedVideoFiles(files []*file.VideoFile) RelatedVideoFiles { +func NewRelatedVideoFiles(files []*VideoFile) RelatedVideoFiles { ret := RelatedVideoFiles{ files: files, primaryLoaded: true, @@ -242,12 +246,12 @@ func NewRelatedVideoFiles(files []*file.VideoFile) RelatedVideoFiles { return ret } -func (r *RelatedVideoFiles) SetPrimary(f *file.VideoFile) { +func (r *RelatedVideoFiles) SetPrimary(f *VideoFile) { r.primaryFile = f r.primaryLoaded = true } -func (r *RelatedVideoFiles) Set(f []*file.VideoFile) { +func (r *RelatedVideoFiles) Set(f []*VideoFile) { r.files = f if len(r.files) > 0 { r.primaryFile = r.files[0] @@ -267,7 +271,7 @@ func (r RelatedVideoFiles) PrimaryLoaded() bool { } // List returns the related files. Panics if the relationship has not been loaded. -func (r RelatedVideoFiles) List() []*file.VideoFile { +func (r RelatedVideoFiles) List() []*VideoFile { if !r.Loaded() { panic("relationship has not been loaded") } @@ -276,7 +280,7 @@ func (r RelatedVideoFiles) List() []*file.VideoFile { } // Primary returns the primary file. Panics if the relationship has not been loaded. 
-func (r RelatedVideoFiles) Primary() *file.VideoFile { +func (r RelatedVideoFiles) Primary() *VideoFile { if !r.PrimaryLoaded() { panic("relationship has not been loaded") } @@ -284,7 +288,7 @@ func (r RelatedVideoFiles) Primary() *file.VideoFile { return r.primaryFile } -func (r *RelatedVideoFiles) load(fn func() ([]*file.VideoFile, error)) error { +func (r *RelatedVideoFiles) load(fn func() ([]*VideoFile, error)) error { if r.Loaded() { return nil } @@ -304,7 +308,7 @@ func (r *RelatedVideoFiles) load(fn func() ([]*file.VideoFile, error)) error { return nil } -func (r *RelatedVideoFiles) loadPrimary(fn func() (*file.VideoFile, error)) error { +func (r *RelatedVideoFiles) loadPrimary(fn func() (*VideoFile, error)) error { if r.PrimaryLoaded() { return nil } @@ -321,12 +325,12 @@ func (r *RelatedVideoFiles) loadPrimary(fn func() (*file.VideoFile, error)) erro } type RelatedFiles struct { - primaryFile file.File - files []file.File + primaryFile File + files []File primaryLoaded bool } -func NewRelatedFiles(files []file.File) RelatedFiles { +func NewRelatedFiles(files []File) RelatedFiles { ret := RelatedFiles{ files: files, primaryLoaded: true, @@ -350,7 +354,7 @@ func (r RelatedFiles) PrimaryLoaded() bool { } // List returns the related files. Panics if the relationship has not been loaded. -func (r RelatedFiles) List() []file.File { +func (r RelatedFiles) List() []File { if !r.Loaded() { panic("relationship has not been loaded") } @@ -359,7 +363,7 @@ func (r RelatedFiles) List() []file.File { } // Primary returns the primary file. Panics if the relationship has not been loaded. 
-func (r RelatedFiles) Primary() file.File { +func (r RelatedFiles) Primary() File { if !r.PrimaryLoaded() { panic("relationship has not been loaded") } @@ -367,7 +371,7 @@ func (r RelatedFiles) Primary() file.File { return r.primaryFile } -func (r *RelatedFiles) load(fn func() ([]file.File, error)) error { +func (r *RelatedFiles) load(fn func() ([]File, error)) error { if r.Loaded() { return nil } @@ -387,7 +391,7 @@ func (r *RelatedFiles) load(fn func() ([]file.File, error)) error { return nil } -func (r *RelatedFiles) loadPrimary(fn func() (file.File, error)) error { +func (r *RelatedFiles) loadPrimary(fn func() (File, error)) error { if r.PrimaryLoaded() { return nil } diff --git a/pkg/models/repository.go b/pkg/models/repository.go index fe0e21dc004..9ba4eead11a 100644 --- a/pkg/models/repository.go +++ b/pkg/models/repository.go @@ -1,7 +1,6 @@ package models import ( - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/txn" ) @@ -14,8 +13,8 @@ type TxnManager interface { type Repository struct { TxnManager - File file.Store - Folder file.FolderStore + File FileReaderWriter + Folder FolderReaderWriter Gallery GalleryReaderWriter GalleryChapter GalleryChapterReaderWriter Image ImageReaderWriter diff --git a/pkg/models/repository_file.go b/pkg/models/repository_file.go new file mode 100644 index 00000000000..8ea9709db9b --- /dev/null +++ b/pkg/models/repository_file.go @@ -0,0 +1,88 @@ +package models + +import ( + "context" + "io/fs" +) + +// FileGetter provides methods to get files by ID. +type FileGetter interface { + Find(ctx context.Context, id ...FileID) ([]File, error) +} + +// FileFinder provides methods to find files. 
+type FileFinder interface { + FileGetter + FindAllByPath(ctx context.Context, path string) ([]File, error) + FindAllInPaths(ctx context.Context, p []string, limit, offset int) ([]File, error) + FindByPath(ctx context.Context, path string) (File, error) + FindByFingerprint(ctx context.Context, fp Fingerprint) ([]File, error) + FindByZipFileID(ctx context.Context, zipFileID FileID) ([]File, error) + FindByFileInfo(ctx context.Context, info fs.FileInfo, size int64) ([]File, error) +} + +// FileQueryer provides methods to query files. +type FileQueryer interface { + Query(ctx context.Context, options FileQueryOptions) (*FileQueryResult, error) +} + +// FileCounter provides methods to count files. +type FileCounter interface { + CountAllInPaths(ctx context.Context, p []string) (int, error) + CountByFolderID(ctx context.Context, folderID FolderID) (int, error) +} + +// FileCreator provides methods to create files. +type FileCreator interface { + Create(ctx context.Context, f File) error +} + +// FileUpdater provides methods to update files. +type FileUpdater interface { + Update(ctx context.Context, f File) error +} + +// FileDestroyer provides methods to destroy files. +type FileDestroyer interface { + Destroy(ctx context.Context, id FileID) error +} + +type FileFinderCreator interface { + FileFinder + FileCreator +} + +type FileFinderUpdater interface { + FileFinder + FileUpdater +} + +type FileFinderDestroyer interface { + FileFinder + FileDestroyer +} + +// FileReader provides all methods to read files. +type FileReader interface { + FileFinder + FileQueryer + FileCounter + + GetCaptions(ctx context.Context, fileID FileID) ([]*VideoCaption, error) + IsPrimary(ctx context.Context, fileID FileID) (bool, error) +} + +// FileWriter provides all methods to modify files. 
+type FileWriter interface { + FileCreator + FileUpdater + FileDestroyer + + UpdateCaptions(ctx context.Context, fileID FileID, captions []*VideoCaption) error +} + +// FileReaderWriter provides all file methods. +type FileReaderWriter interface { + FileReader + FileWriter +} diff --git a/pkg/models/repository_folder.go b/pkg/models/repository_folder.go new file mode 100644 index 00000000000..c3f82f52942 --- /dev/null +++ b/pkg/models/repository_folder.go @@ -0,0 +1,64 @@ +package models + +import "context" + +// FolderGetter provides methods to get folders by ID. +type FolderGetter interface { + Find(ctx context.Context, id FolderID) (*Folder, error) +} + +// FolderFinder provides methods to find folders. +type FolderFinder interface { + FolderGetter + FindAllInPaths(ctx context.Context, p []string, limit, offset int) ([]*Folder, error) + FindByPath(ctx context.Context, path string) (*Folder, error) + FindByZipFileID(ctx context.Context, zipFileID FileID) ([]*Folder, error) + FindByParentFolderID(ctx context.Context, parentFolderID FolderID) ([]*Folder, error) +} + +type FolderCounter interface { + CountAllInPaths(ctx context.Context, p []string) (int, error) +} + +// FolderCreator provides methods to create folders. +type FolderCreator interface { + Create(ctx context.Context, f *Folder) error +} + +// FolderUpdater provides methods to update folders. +type FolderUpdater interface { + Update(ctx context.Context, f *Folder) error +} + +type FolderDestroyer interface { + Destroy(ctx context.Context, id FolderID) error +} + +type FolderFinderCreator interface { + FolderFinder + FolderCreator +} + +type FolderFinderDestroyer interface { + FolderFinder + FolderDestroyer +} + +// FolderReader provides all methods to read folders. +type FolderReader interface { + FolderFinder + FolderCounter +} + +// FolderWriter provides all methods to modify folders. 
+type FolderWriter interface { + FolderCreator + FolderUpdater + FolderDestroyer +} + +// FolderReaderWriter provides all folder methods. +type FolderReaderWriter interface { + FolderReader + FolderWriter +} diff --git a/pkg/models/repository_gallery.go b/pkg/models/repository_gallery.go new file mode 100644 index 00000000000..64019886cc7 --- /dev/null +++ b/pkg/models/repository_gallery.go @@ -0,0 +1,91 @@ +package models + +import "context" + +// GalleryGetter provides methods to get galleries by ID. +type GalleryGetter interface { + // TODO - rename this to Find and remove existing method + FindMany(ctx context.Context, ids []int) ([]*Gallery, error) + Find(ctx context.Context, id int) (*Gallery, error) +} + +// GalleryFinder provides methods to find galleries. +type GalleryFinder interface { + GalleryGetter + FindByFingerprints(ctx context.Context, fp []Fingerprint) ([]*Gallery, error) + FindByChecksum(ctx context.Context, checksum string) ([]*Gallery, error) + FindByChecksums(ctx context.Context, checksums []string) ([]*Gallery, error) + FindByPath(ctx context.Context, path string) ([]*Gallery, error) + FindByFileID(ctx context.Context, fileID FileID) ([]*Gallery, error) + FindByFolderID(ctx context.Context, folderID FolderID) ([]*Gallery, error) + FindBySceneID(ctx context.Context, sceneID int) ([]*Gallery, error) + FindByImageID(ctx context.Context, imageID int) ([]*Gallery, error) + FindUserGalleryByTitle(ctx context.Context, title string) ([]*Gallery, error) +} + +// GalleryQueryer provides methods to query galleries. +type GalleryQueryer interface { + Query(ctx context.Context, galleryFilter *GalleryFilterType, findFilter *FindFilterType) ([]*Gallery, int, error) + QueryCount(ctx context.Context, galleryFilter *GalleryFilterType, findFilter *FindFilterType) (int, error) +} + +// GalleryCounter provides methods to count galleries. 
+type GalleryCounter interface { + Count(ctx context.Context) (int, error) + CountByFileID(ctx context.Context, fileID FileID) (int, error) +} + +// GalleryCreator provides methods to create galleries. +type GalleryCreator interface { + Create(ctx context.Context, newGallery *Gallery, fileIDs []FileID) error +} + +// GalleryUpdater provides methods to update galleries. +type GalleryUpdater interface { + Update(ctx context.Context, updatedGallery *Gallery) error + UpdatePartial(ctx context.Context, id int, updatedGallery GalleryPartial) (*Gallery, error) + UpdateImages(ctx context.Context, galleryID int, imageIDs []int) error +} + +// GalleryDestroyer provides methods to destroy galleries. +type GalleryDestroyer interface { + Destroy(ctx context.Context, id int) error +} + +type GalleryCreatorUpdater interface { + GalleryCreator + GalleryUpdater +} + +// GalleryReader provides all methods to read galleries. +type GalleryReader interface { + GalleryFinder + GalleryQueryer + GalleryCounter + + FileIDLoader + ImageIDLoader + SceneIDLoader + PerformerIDLoader + TagIDLoader + FileLoader + + All(ctx context.Context) ([]*Gallery, error) +} + +// GalleryWriter provides all methods to modify galleries. +type GalleryWriter interface { + GalleryCreator + GalleryUpdater + GalleryDestroyer + + AddFileID(ctx context.Context, id int, fileID FileID) error + AddImages(ctx context.Context, galleryID int, imageIDs ...int) error + RemoveImages(ctx context.Context, galleryID int, imageIDs ...int) error +} + +// GalleryReaderWriter provides all gallery methods. +type GalleryReaderWriter interface { + GalleryReader + GalleryWriter +} diff --git a/pkg/models/repository_gallery_chapter.go b/pkg/models/repository_gallery_chapter.go new file mode 100644 index 00000000000..5a926a0003f --- /dev/null +++ b/pkg/models/repository_gallery_chapter.go @@ -0,0 +1,55 @@ +package models + +import "context" + +// GalleryChapterGetter provides methods to get gallery chapters by ID. 
+type GalleryChapterGetter interface { + // TODO - rename this to Find and remove existing method + FindMany(ctx context.Context, ids []int) ([]*GalleryChapter, error) + Find(ctx context.Context, id int) (*GalleryChapter, error) +} + +// GalleryChapterFinder provides methods to find gallery chapters. +type GalleryChapterFinder interface { + GalleryChapterGetter + FindByGalleryID(ctx context.Context, galleryID int) ([]*GalleryChapter, error) +} + +// GalleryChapterCreator provides methods to create gallery chapters. +type GalleryChapterCreator interface { + Create(ctx context.Context, newGalleryChapter *GalleryChapter) error +} + +// GalleryChapterUpdater provides methods to update gallery chapters. +type GalleryChapterUpdater interface { + Update(ctx context.Context, updatedGalleryChapter *GalleryChapter) error + UpdatePartial(ctx context.Context, id int, updatedGalleryChapter GalleryChapterPartial) (*GalleryChapter, error) +} + +// GalleryChapterDestroyer provides methods to destroy gallery chapters. +type GalleryChapterDestroyer interface { + Destroy(ctx context.Context, id int) error +} + +type GalleryChapterCreatorUpdater interface { + GalleryChapterCreator + GalleryChapterUpdater +} + +// GalleryChapterReader provides all methods to read gallery chapters. +type GalleryChapterReader interface { + GalleryChapterFinder +} + +// GalleryChapterWriter provides all methods to modify gallery chapters. +type GalleryChapterWriter interface { + GalleryChapterCreator + GalleryChapterUpdater + GalleryChapterDestroyer +} + +// GalleryChapterReaderWriter provides all gallery chapter methods. +type GalleryChapterReaderWriter interface { + GalleryChapterReader + GalleryChapterWriter +} diff --git a/pkg/models/repository_image.go b/pkg/models/repository_image.go new file mode 100644 index 00000000000..5b191b2ab8c --- /dev/null +++ b/pkg/models/repository_image.go @@ -0,0 +1,92 @@ +package models + +import "context" + +// ImageGetter provides methods to get images by ID. 
+type ImageGetter interface { + // TODO - rename this to Find and remove existing method + FindMany(ctx context.Context, ids []int) ([]*Image, error) + Find(ctx context.Context, id int) (*Image, error) +} + +// ImageFinder provides methods to find images. +type ImageFinder interface { + ImageGetter + FindByFingerprints(ctx context.Context, fp []Fingerprint) ([]*Image, error) + FindByChecksum(ctx context.Context, checksum string) ([]*Image, error) + FindByFileID(ctx context.Context, fileID FileID) ([]*Image, error) + FindByFolderID(ctx context.Context, folderID FolderID) ([]*Image, error) + FindByZipFileID(ctx context.Context, zipFileID FileID) ([]*Image, error) + FindByGalleryID(ctx context.Context, galleryID int) ([]*Image, error) +} + +// ImageQueryer provides methods to query images. +type ImageQueryer interface { + Query(ctx context.Context, options ImageQueryOptions) (*ImageQueryResult, error) + QueryCount(ctx context.Context, imageFilter *ImageFilterType, findFilter *FindFilterType) (int, error) +} + +// ImageCounter provides methods to count images. +type ImageCounter interface { + Count(ctx context.Context) (int, error) + CountByFileID(ctx context.Context, fileID FileID) (int, error) + CountByGalleryID(ctx context.Context, galleryID int) (int, error) + OCountByPerformerID(ctx context.Context, performerID int) (int, error) +} + +// ImageCreator provides methods to create images. +type ImageCreator interface { + Create(ctx context.Context, newImage *ImageCreateInput) error +} + +// ImageUpdater provides methods to update images. +type ImageUpdater interface { + Update(ctx context.Context, updatedImage *Image) error + UpdatePartial(ctx context.Context, id int, partial ImagePartial) (*Image, error) + UpdatePerformers(ctx context.Context, imageID int, performerIDs []int) error + UpdateTags(ctx context.Context, imageID int, tagIDs []int) error +} + +// ImageDestroyer provides methods to destroy images. 
+type ImageDestroyer interface { + Destroy(ctx context.Context, id int) error +} + +type ImageCreatorUpdater interface { + ImageCreator + ImageUpdater +} + +// ImageReader provides all methods to read images. +type ImageReader interface { + ImageFinder + ImageQueryer + ImageCounter + + FileIDLoader + GalleryIDLoader + PerformerIDLoader + TagIDLoader + FileLoader + + All(ctx context.Context) ([]*Image, error) + Size(ctx context.Context) (float64, error) +} + +// ImageWriter provides all methods to modify images. +type ImageWriter interface { + ImageCreator + ImageUpdater + ImageDestroyer + + AddFileID(ctx context.Context, id int, fileID FileID) error + IncrementOCounter(ctx context.Context, id int) (int, error) + DecrementOCounter(ctx context.Context, id int) (int, error) + ResetOCounter(ctx context.Context, id int) (int, error) +} + +// ImageReaderWriter provides all image methods. +type ImageReaderWriter interface { + ImageReader + ImageWriter +} diff --git a/pkg/models/repository_movie.go b/pkg/models/repository_movie.go new file mode 100644 index 00000000000..9234ea7a5d1 --- /dev/null +++ b/pkg/models/repository_movie.go @@ -0,0 +1,86 @@ +package models + +import "context" + +// MovieGetter provides methods to get movies by ID. +type MovieGetter interface { + // TODO - rename this to Find and remove existing method + FindMany(ctx context.Context, ids []int) ([]*Movie, error) + Find(ctx context.Context, id int) (*Movie, error) +} + +// MovieFinder provides methods to find movies. +type MovieFinder interface { + MovieGetter + FindByPerformerID(ctx context.Context, performerID int) ([]*Movie, error) + FindByStudioID(ctx context.Context, studioID int) ([]*Movie, error) + FindByName(ctx context.Context, name string, nocase bool) (*Movie, error) + FindByNames(ctx context.Context, names []string, nocase bool) ([]*Movie, error) +} + +// MovieQueryer provides methods to query movies. 
+type MovieQueryer interface { + Query(ctx context.Context, movieFilter *MovieFilterType, findFilter *FindFilterType) ([]*Movie, int, error) + QueryCount(ctx context.Context, movieFilter *MovieFilterType, findFilter *FindFilterType) (int, error) +} + +// MovieCounter provides methods to count movies. +type MovieCounter interface { + Count(ctx context.Context) (int, error) + CountByPerformerID(ctx context.Context, performerID int) (int, error) + CountByStudioID(ctx context.Context, studioID int) (int, error) +} + +// MovieCreator provides methods to create movies. +type MovieCreator interface { + Create(ctx context.Context, newMovie *Movie) error +} + +// MovieUpdater provides methods to update movies. +type MovieUpdater interface { + Update(ctx context.Context, updatedMovie *Movie) error + UpdatePartial(ctx context.Context, id int, updatedMovie MoviePartial) (*Movie, error) + UpdateFrontImage(ctx context.Context, movieID int, frontImage []byte) error + UpdateBackImage(ctx context.Context, movieID int, backImage []byte) error +} + +// MovieDestroyer provides methods to destroy movies. +type MovieDestroyer interface { + Destroy(ctx context.Context, id int) error +} + +type MovieCreatorUpdater interface { + MovieCreator + MovieUpdater +} + +type MovieFinderCreator interface { + MovieFinder + MovieCreator +} + +// MovieReader provides all methods to read movies. +type MovieReader interface { + MovieFinder + MovieQueryer + MovieCounter + + All(ctx context.Context) ([]*Movie, error) + GetFrontImage(ctx context.Context, movieID int) ([]byte, error) + HasFrontImage(ctx context.Context, movieID int) (bool, error) + GetBackImage(ctx context.Context, movieID int) ([]byte, error) + HasBackImage(ctx context.Context, movieID int) (bool, error) +} + +// MovieWriter provides all methods to modify movies. +type MovieWriter interface { + MovieCreator + MovieUpdater + MovieDestroyer +} + +// MovieReaderWriter provides all movie methods. 
+type MovieReaderWriter interface { + MovieReader + MovieWriter +} diff --git a/pkg/models/repository_performer.go b/pkg/models/repository_performer.go new file mode 100644 index 00000000000..aac7e0488e4 --- /dev/null +++ b/pkg/models/repository_performer.go @@ -0,0 +1,98 @@ +package models + +import "context" + +// PerformerGetter provides methods to get performers by ID. +type PerformerGetter interface { + // TODO - rename this to Find and remove existing method + FindMany(ctx context.Context, ids []int) ([]*Performer, error) + Find(ctx context.Context, id int) (*Performer, error) +} + +// PerformerFinder provides methods to find performers. +type PerformerFinder interface { + PerformerGetter + FindBySceneID(ctx context.Context, sceneID int) ([]*Performer, error) + FindByImageID(ctx context.Context, imageID int) ([]*Performer, error) + FindByGalleryID(ctx context.Context, galleryID int) ([]*Performer, error) + FindByStashID(ctx context.Context, stashID StashID) ([]*Performer, error) + FindByStashIDStatus(ctx context.Context, hasStashID bool, stashboxEndpoint string) ([]*Performer, error) + FindByNames(ctx context.Context, names []string, nocase bool) ([]*Performer, error) +} + +// PerformerQueryer provides methods to query performers. +type PerformerQueryer interface { + Query(ctx context.Context, performerFilter *PerformerFilterType, findFilter *FindFilterType) ([]*Performer, int, error) + QueryCount(ctx context.Context, performerFilter *PerformerFilterType, findFilter *FindFilterType) (int, error) +} + +type PerformerAutoTagQueryer interface { + PerformerQueryer + AliasLoader + + // TODO - this interface is temporary until the filter schema can fully + // support the query needed + QueryForAutoTag(ctx context.Context, words []string) ([]*Performer, error) +} + +// PerformerCounter provides methods to count performers. 
+type PerformerCounter interface { + Count(ctx context.Context) (int, error) + CountByTagID(ctx context.Context, tagID int) (int, error) +} + +// PerformerCreator provides methods to create performers. +type PerformerCreator interface { + Create(ctx context.Context, newPerformer *Performer) error +} + +// PerformerUpdater provides methods to update performers. +type PerformerUpdater interface { + Update(ctx context.Context, updatedPerformer *Performer) error + UpdatePartial(ctx context.Context, id int, updatedPerformer PerformerPartial) (*Performer, error) + UpdateImage(ctx context.Context, performerID int, image []byte) error +} + +// PerformerDestroyer provides methods to destroy performers. +type PerformerDestroyer interface { + Destroy(ctx context.Context, id int) error +} + +type PerformerFinderCreator interface { + PerformerFinder + PerformerCreator +} + +type PerformerCreatorUpdater interface { + PerformerCreator + PerformerUpdater +} + +// PerformerReader provides all methods to read performers. +type PerformerReader interface { + PerformerFinder + PerformerQueryer + PerformerAutoTagQueryer + PerformerCounter + + AliasLoader + StashIDLoader + TagIDLoader + + All(ctx context.Context) ([]*Performer, error) + GetImage(ctx context.Context, performerID int) ([]byte, error) + HasImage(ctx context.Context, performerID int) (bool, error) +} + +// PerformerWriter provides all methods to modify performers. +type PerformerWriter interface { + PerformerCreator + PerformerUpdater + PerformerDestroyer +} + +// PerformerReaderWriter provides all performer methods. +type PerformerReaderWriter interface { + PerformerReader + PerformerWriter +} diff --git a/pkg/models/repository_scene.go b/pkg/models/repository_scene.go new file mode 100644 index 00000000000..fdd839ed6b3 --- /dev/null +++ b/pkg/models/repository_scene.go @@ -0,0 +1,115 @@ +package models + +import "context" + +// SceneGetter provides methods to get scenes by ID. 
+type SceneGetter interface { + // TODO - rename this to Find and remove existing method + FindMany(ctx context.Context, ids []int) ([]*Scene, error) + Find(ctx context.Context, id int) (*Scene, error) +} + +// SceneFinder provides methods to find scenes. +type SceneFinder interface { + SceneGetter + FindByFingerprints(ctx context.Context, fp []Fingerprint) ([]*Scene, error) + FindByChecksum(ctx context.Context, checksum string) ([]*Scene, error) + FindByOSHash(ctx context.Context, oshash string) ([]*Scene, error) + FindByPath(ctx context.Context, path string) ([]*Scene, error) + FindByFileID(ctx context.Context, fileID FileID) ([]*Scene, error) + FindByPrimaryFileID(ctx context.Context, fileID FileID) ([]*Scene, error) + FindByPerformerID(ctx context.Context, performerID int) ([]*Scene, error) + FindByGalleryID(ctx context.Context, galleryID int) ([]*Scene, error) + FindByMovieID(ctx context.Context, movieID int) ([]*Scene, error) + FindDuplicates(ctx context.Context, distance int, durationDiff float64) ([][]*Scene, error) +} + +// SceneQueryer provides methods to query scenes. +type SceneQueryer interface { + Query(ctx context.Context, options SceneQueryOptions) (*SceneQueryResult, error) + QueryCount(ctx context.Context, sceneFilter *SceneFilterType, findFilter *FindFilterType) (int, error) +} + +// SceneCounter provides methods to count scenes. 
+type SceneCounter interface { + Count(ctx context.Context) (int, error) + CountByPerformerID(ctx context.Context, performerID int) (int, error) + CountByMovieID(ctx context.Context, movieID int) (int, error) + CountByFileID(ctx context.Context, fileID FileID) (int, error) + CountByStudioID(ctx context.Context, studioID int) (int, error) + CountByTagID(ctx context.Context, tagID int) (int, error) + CountMissingChecksum(ctx context.Context) (int, error) + CountMissingOSHash(ctx context.Context) (int, error) + OCount(ctx context.Context) (int, error) + OCountByPerformerID(ctx context.Context, performerID int) (int, error) + PlayCount(ctx context.Context) (int, error) + UniqueScenePlayCount(ctx context.Context) (int, error) +} + +// SceneCreator provides methods to create scenes. +type SceneCreator interface { + Create(ctx context.Context, newScene *Scene, fileIDs []FileID) error +} + +// SceneUpdater provides methods to update scenes. +type SceneUpdater interface { + Update(ctx context.Context, updatedScene *Scene) error + UpdatePartial(ctx context.Context, id int, updatedScene ScenePartial) (*Scene, error) + UpdateCover(ctx context.Context, sceneID int, cover []byte) error +} + +// SceneDestroyer provides methods to destroy scenes. +type SceneDestroyer interface { + Destroy(ctx context.Context, id int) error +} + +type SceneCreatorUpdater interface { + SceneCreator + SceneUpdater +} + +// SceneReader provides all methods to read scenes. 
+type SceneReader interface { + SceneFinder + SceneQueryer + SceneCounter + + URLLoader + FileIDLoader + GalleryIDLoader + PerformerIDLoader + TagIDLoader + SceneMovieLoader + StashIDLoader + VideoFileLoader + + All(ctx context.Context) ([]*Scene, error) + Wall(ctx context.Context, q *string) ([]*Scene, error) + Size(ctx context.Context) (float64, error) + Duration(ctx context.Context) (float64, error) + PlayDuration(ctx context.Context) (float64, error) + GetCover(ctx context.Context, sceneID int) ([]byte, error) + HasCover(ctx context.Context, sceneID int) (bool, error) +} + +// SceneWriter provides all methods to modify scenes. +type SceneWriter interface { + SceneCreator + SceneUpdater + SceneDestroyer + + AddFileID(ctx context.Context, id int, fileID FileID) error + AddGalleryIDs(ctx context.Context, sceneID int, galleryIDs []int) error + AssignFiles(ctx context.Context, sceneID int, fileID []FileID) error + IncrementOCounter(ctx context.Context, id int) (int, error) + DecrementOCounter(ctx context.Context, id int) (int, error) + ResetOCounter(ctx context.Context, id int) (int, error) + SaveActivity(ctx context.Context, sceneID int, resumeTime *float64, playDuration *float64) (bool, error) + IncrementWatchCount(ctx context.Context, sceneID int) (int, error) +} + +// SceneReaderWriter provides all scene methods. +type SceneReaderWriter interface { + SceneReader + SceneWriter +} diff --git a/pkg/models/repository_scene_marker.go b/pkg/models/repository_scene_marker.go new file mode 100644 index 00000000000..d35ec762f51 --- /dev/null +++ b/pkg/models/repository_scene_marker.go @@ -0,0 +1,76 @@ +package models + +import "context" + +// SceneMarkerGetter provides methods to get scene markers by ID. 
+type SceneMarkerGetter interface { + // TODO - rename this to Find and remove existing method + FindMany(ctx context.Context, ids []int) ([]*SceneMarker, error) + Find(ctx context.Context, id int) (*SceneMarker, error) +} + +// SceneMarkerFinder provides methods to find scene markers. +type SceneMarkerFinder interface { + SceneMarkerGetter + FindBySceneID(ctx context.Context, sceneID int) ([]*SceneMarker, error) +} + +// SceneMarkerQueryer provides methods to query scene markers. +type SceneMarkerQueryer interface { + Query(ctx context.Context, sceneMarkerFilter *SceneMarkerFilterType, findFilter *FindFilterType) ([]*SceneMarker, int, error) + QueryCount(ctx context.Context, sceneMarkerFilter *SceneMarkerFilterType, findFilter *FindFilterType) (int, error) +} + +// SceneMarkerCounter provides methods to count scene markers. +type SceneMarkerCounter interface { + Count(ctx context.Context) (int, error) + CountByTagID(ctx context.Context, tagID int) (int, error) +} + +// SceneMarkerCreator provides methods to create scene markers. +type SceneMarkerCreator interface { + Create(ctx context.Context, newSceneMarker *SceneMarker) error +} + +// SceneMarkerUpdater provides methods to update scene markers. +type SceneMarkerUpdater interface { + Update(ctx context.Context, updatedSceneMarker *SceneMarker) error + UpdatePartial(ctx context.Context, id int, updatedSceneMarker SceneMarkerPartial) (*SceneMarker, error) + UpdateTags(ctx context.Context, markerID int, tagIDs []int) error +} + +// SceneMarkerDestroyer provides methods to destroy scene markers. +type SceneMarkerDestroyer interface { + Destroy(ctx context.Context, id int) error +} + +type SceneMarkerCreatorUpdater interface { + SceneMarkerCreator + SceneMarkerUpdater +} + +// SceneMarkerReader provides all methods to read scene markers. 
+type SceneMarkerReader interface { + SceneMarkerFinder + SceneMarkerQueryer + SceneMarkerCounter + + TagIDLoader + + All(ctx context.Context) ([]*SceneMarker, error) + Wall(ctx context.Context, q *string) ([]*SceneMarker, error) + GetMarkerStrings(ctx context.Context, q *string, sort *string) ([]*MarkerStringsResultType, error) +} + +// SceneMarkerWriter provides all methods to modify scene markers. +type SceneMarkerWriter interface { + SceneMarkerCreator + SceneMarkerUpdater + SceneMarkerDestroyer +} + +// SceneMarkerReaderWriter provides all scene marker methods. +type SceneMarkerReaderWriter interface { + SceneMarkerReader + SceneMarkerWriter +} diff --git a/pkg/models/repository_studio.go b/pkg/models/repository_studio.go new file mode 100644 index 00000000000..272bf8fed23 --- /dev/null +++ b/pkg/models/repository_studio.go @@ -0,0 +1,94 @@ +package models + +import "context" + +// StudioGetter provides methods to get studios by ID. +type StudioGetter interface { + // TODO - rename this to Find and remove existing method + FindMany(ctx context.Context, ids []int) ([]*Studio, error) + Find(ctx context.Context, id int) (*Studio, error) +} + +// StudioFinder provides methods to find studios. +type StudioFinder interface { + StudioGetter + FindChildren(ctx context.Context, id int) ([]*Studio, error) + FindBySceneID(ctx context.Context, sceneID int) (*Studio, error) + FindByStashID(ctx context.Context, stashID StashID) ([]*Studio, error) + FindByStashIDStatus(ctx context.Context, hasStashID bool, stashboxEndpoint string) ([]*Studio, error) + FindByName(ctx context.Context, name string, nocase bool) (*Studio, error) +} + +// StudioQueryer provides methods to query studios. 
+type StudioQueryer interface { + Query(ctx context.Context, studioFilter *StudioFilterType, findFilter *FindFilterType) ([]*Studio, int, error) +} + +type StudioAutoTagQueryer interface { + StudioQueryer + AliasLoader + + // TODO - this interface is temporary until the filter schema can fully + // support the query needed + QueryForAutoTag(ctx context.Context, words []string) ([]*Studio, error) +} + +// StudioCounter provides methods to count studios. +type StudioCounter interface { + Count(ctx context.Context) (int, error) +} + +// StudioCreator provides methods to create studios. +type StudioCreator interface { + Create(ctx context.Context, newStudio *Studio) error +} + +// StudioUpdater provides methods to update studios. +type StudioUpdater interface { + Update(ctx context.Context, updatedStudio *Studio) error + UpdatePartial(ctx context.Context, updatedStudio StudioPartial) (*Studio, error) + UpdateImage(ctx context.Context, studioID int, image []byte) error +} + +// StudioDestroyer provides methods to destroy studios. +type StudioDestroyer interface { + Destroy(ctx context.Context, id int) error +} + +type StudioFinderCreator interface { + StudioFinder + StudioCreator +} + +type StudioCreatorUpdater interface { + StudioCreator + StudioUpdater +} + +// StudioReader provides all methods to read studios. +type StudioReader interface { + StudioFinder + StudioQueryer + StudioAutoTagQueryer + StudioCounter + + AliasLoader + StashIDLoader + + All(ctx context.Context) ([]*Studio, error) + GetImage(ctx context.Context, studioID int) ([]byte, error) + HasImage(ctx context.Context, studioID int) (bool, error) +} + +// StudioWriter provides all methods to modify studios. +type StudioWriter interface { + StudioCreator + StudioUpdater + StudioDestroyer +} + +// StudioReaderWriter provides all studio methods. 
+type StudioReaderWriter interface { + StudioReader + StudioWriter +} diff --git a/pkg/models/repository_tag.go b/pkg/models/repository_tag.go new file mode 100644 index 00000000000..ca8f6971bf7 --- /dev/null +++ b/pkg/models/repository_tag.go @@ -0,0 +1,106 @@ +package models + +import "context" + +// TagGetter provides methods to get tags by ID. +type TagGetter interface { + // TODO - rename this to Find and remove existing method + FindMany(ctx context.Context, ids []int) ([]*Tag, error) + Find(ctx context.Context, id int) (*Tag, error) +} + +// TagFinder provides methods to find tags. +type TagFinder interface { + TagGetter + FindAllAncestors(ctx context.Context, tagID int, excludeIDs []int) ([]*TagPath, error) + FindAllDescendants(ctx context.Context, tagID int, excludeIDs []int) ([]*TagPath, error) + FindByParentTagID(ctx context.Context, parentID int) ([]*Tag, error) + FindByChildTagID(ctx context.Context, childID int) ([]*Tag, error) + FindBySceneID(ctx context.Context, sceneID int) ([]*Tag, error) + FindByImageID(ctx context.Context, imageID int) ([]*Tag, error) + FindByGalleryID(ctx context.Context, galleryID int) ([]*Tag, error) + FindByPerformerID(ctx context.Context, performerID int) ([]*Tag, error) + FindBySceneMarkerID(ctx context.Context, sceneMarkerID int) ([]*Tag, error) + FindByName(ctx context.Context, name string, nocase bool) (*Tag, error) + FindByNames(ctx context.Context, names []string, nocase bool) ([]*Tag, error) +} + +// TagQueryer provides methods to query tags. +type TagQueryer interface { + Query(ctx context.Context, tagFilter *TagFilterType, findFilter *FindFilterType) ([]*Tag, int, error) +} + +type TagAutoTagQueryer interface { + TagQueryer + AliasLoader + + // TODO - this interface is temporary until the filter schema can fully + // support the query needed + QueryForAutoTag(ctx context.Context, words []string) ([]*Tag, error) +} + +// TagCounter provides methods to count tags. 
+type TagCounter interface { + Count(ctx context.Context) (int, error) + CountByParentTagID(ctx context.Context, parentID int) (int, error) + CountByChildTagID(ctx context.Context, childID int) (int, error) +} + +// TagCreator provides methods to create tags. +type TagCreator interface { + Create(ctx context.Context, newTag *Tag) error +} + +// TagUpdater provides methods to update tags. +type TagUpdater interface { + Update(ctx context.Context, updatedTag *Tag) error + UpdatePartial(ctx context.Context, id int, updatedTag TagPartial) (*Tag, error) + UpdateAliases(ctx context.Context, tagID int, aliases []string) error + UpdateImage(ctx context.Context, tagID int, image []byte) error + UpdateParentTags(ctx context.Context, tagID int, parentIDs []int) error + UpdateChildTags(ctx context.Context, tagID int, childIDs []int) error +} + +// TagDestroyer provides methods to destroy tags. +type TagDestroyer interface { + Destroy(ctx context.Context, id int) error +} + +type TagFinderCreator interface { + TagFinder + TagCreator +} + +type TagCreatorUpdater interface { + TagCreator + TagUpdater +} + +// TagReader provides all methods to read tags. +type TagReader interface { + TagFinder + TagQueryer + TagAutoTagQueryer + TagCounter + + AliasLoader + + All(ctx context.Context) ([]*Tag, error) + GetImage(ctx context.Context, tagID int) ([]byte, error) + HasImage(ctx context.Context, tagID int) (bool, error) +} + +// TagWriter provides all methods to modify tags. +type TagWriter interface { + TagCreator + TagUpdater + TagDestroyer + + Merge(ctx context.Context, source []int, destination int) error +} + +// TagReaderWriter provides all tag methods. 
+type TagReaderWriter interface { + TagReader + TagWriter +} diff --git a/pkg/models/scene.go b/pkg/models/scene.go index 8f8d2eaf420..e66576f3599 100644 --- a/pkg/models/scene.go +++ b/pkg/models/scene.go @@ -1,10 +1,6 @@ package models -import ( - "context" - - "github.com/stashapp/stash/pkg/file" -) +import "context" type PHashDuplicationCriterionInput struct { Duplicated *bool `json:"duplicated"` @@ -112,7 +108,7 @@ type SceneQueryResult struct { TotalDuration float64 TotalSize float64 - finder SceneFinder + getter SceneGetter scenes []*Scene resolveErr error } @@ -129,83 +125,16 @@ type ScenesDestroyInput struct { DeleteGenerated *bool `json:"delete_generated"` } -func NewSceneQueryResult(finder SceneFinder) *SceneQueryResult { +func NewSceneQueryResult(getter SceneGetter) *SceneQueryResult { return &SceneQueryResult{ - finder: finder, + getter: getter, } } func (r *SceneQueryResult) Resolve(ctx context.Context) ([]*Scene, error) { // cache results if r.scenes == nil && r.resolveErr == nil { - r.scenes, r.resolveErr = r.finder.FindMany(ctx, r.IDs) + r.scenes, r.resolveErr = r.getter.FindMany(ctx, r.IDs) } return r.scenes, r.resolveErr } - -type SceneFinder interface { - // TODO - rename this to Find and remove existing method - FindMany(ctx context.Context, ids []int) ([]*Scene, error) -} - -type SceneReader interface { - SceneFinder - // TODO - remove this in another PR - Find(ctx context.Context, id int) (*Scene, error) - FindByChecksum(ctx context.Context, checksum string) ([]*Scene, error) - FindByOSHash(ctx context.Context, oshash string) ([]*Scene, error) - FindByPath(ctx context.Context, path string) ([]*Scene, error) - FindByPerformerID(ctx context.Context, performerID int) ([]*Scene, error) - FindByGalleryID(ctx context.Context, performerID int) ([]*Scene, error) - FindDuplicates(ctx context.Context, distance int, durationDiff float64) ([][]*Scene, error) - - URLLoader - GalleryIDLoader - PerformerIDLoader - TagIDLoader - SceneMovieLoader - 
StashIDLoader - VideoFileLoader - - CountByPerformerID(ctx context.Context, performerID int) (int, error) - OCountByPerformerID(ctx context.Context, performerID int) (int, error) - OCount(ctx context.Context) (int, error) - // FindByStudioID(studioID int) ([]*Scene, error) - FindByMovieID(ctx context.Context, movieID int) ([]*Scene, error) - CountByMovieID(ctx context.Context, movieID int) (int, error) - Count(ctx context.Context) (int, error) - PlayCount(ctx context.Context) (int, error) - UniqueScenePlayCount(ctx context.Context) (int, error) - Size(ctx context.Context) (float64, error) - Duration(ctx context.Context) (float64, error) - PlayDuration(ctx context.Context) (float64, error) - // SizeCount() (string, error) - CountByStudioID(ctx context.Context, studioID int) (int, error) - CountByTagID(ctx context.Context, tagID int) (int, error) - CountMissingChecksum(ctx context.Context) (int, error) - CountMissingOSHash(ctx context.Context) (int, error) - Wall(ctx context.Context, q *string) ([]*Scene, error) - All(ctx context.Context) ([]*Scene, error) - Query(ctx context.Context, options SceneQueryOptions) (*SceneQueryResult, error) - QueryCount(ctx context.Context, sceneFilter *SceneFilterType, findFilter *FindFilterType) (int, error) - GetCover(ctx context.Context, sceneID int) ([]byte, error) - HasCover(ctx context.Context, sceneID int) (bool, error) -} - -type SceneWriter interface { - Create(ctx context.Context, newScene *Scene, fileIDs []file.ID) error - Update(ctx context.Context, updatedScene *Scene) error - UpdatePartial(ctx context.Context, id int, updatedScene ScenePartial) (*Scene, error) - IncrementOCounter(ctx context.Context, id int) (int, error) - DecrementOCounter(ctx context.Context, id int) (int, error) - ResetOCounter(ctx context.Context, id int) (int, error) - SaveActivity(ctx context.Context, id int, resumeTime *float64, playDuration *float64) (bool, error) - IncrementWatchCount(ctx context.Context, id int) (int, error) - Destroy(ctx 
context.Context, id int) error - UpdateCover(ctx context.Context, sceneID int, cover []byte) error -} - -type SceneReaderWriter interface { - SceneReader - SceneWriter -} diff --git a/pkg/models/scene_marker.go b/pkg/models/scene_marker.go index 673a547e975..4a10c0e2178 100644 --- a/pkg/models/scene_marker.go +++ b/pkg/models/scene_marker.go @@ -1,7 +1,5 @@ package models -import "context" - type SceneMarkerFilterType struct { // Filter to only include scene markers with this tag TagID *string `json:"tag_id"` @@ -28,30 +26,3 @@ type MarkerStringsResultType struct { ID string `json:"id"` Title string `json:"title"` } - -type SceneMarkerReader interface { - Find(ctx context.Context, id int) (*SceneMarker, error) - FindMany(ctx context.Context, ids []int) ([]*SceneMarker, error) - FindBySceneID(ctx context.Context, sceneID int) ([]*SceneMarker, error) - CountByTagID(ctx context.Context, tagID int) (int, error) - GetMarkerStrings(ctx context.Context, q *string, sort *string) ([]*MarkerStringsResultType, error) - Wall(ctx context.Context, q *string) ([]*SceneMarker, error) - Count(ctx context.Context) (int, error) - All(ctx context.Context) ([]*SceneMarker, error) - Query(ctx context.Context, sceneMarkerFilter *SceneMarkerFilterType, findFilter *FindFilterType) ([]*SceneMarker, int, error) - QueryCount(ctx context.Context, sceneMarkerFilter *SceneMarkerFilterType, findFilter *FindFilterType) (int, error) - GetTagIDs(ctx context.Context, imageID int) ([]int, error) -} - -type SceneMarkerWriter interface { - Create(ctx context.Context, newSceneMarker *SceneMarker) error - Update(ctx context.Context, updatedSceneMarker *SceneMarker) error - UpdatePartial(ctx context.Context, id int, updatedSceneMarker SceneMarkerPartial) (*SceneMarker, error) - Destroy(ctx context.Context, id int) error - UpdateTags(ctx context.Context, markerID int, tagIDs []int) error -} - -type SceneMarkerReaderWriter interface { - SceneMarkerReader - SceneMarkerWriter -} diff --git 
a/pkg/models/studio.go b/pkg/models/studio.go index f98173d2a54..0973df4e316 100644 --- a/pkg/models/studio.go +++ b/pkg/models/studio.go @@ -1,7 +1,5 @@ package models -import "context" - type StudioFilterType struct { And *StudioFilterType `json:"AND"` Or *StudioFilterType `json:"OR"` @@ -37,39 +35,3 @@ type StudioFilterType struct { // Filter by updated at UpdatedAt *TimestampCriterionInput `json:"updated_at"` } - -type StudioFinder interface { - FindMany(ctx context.Context, ids []int) ([]*Studio, error) -} - -type StudioReader interface { - Find(ctx context.Context, id int) (*Studio, error) - StudioFinder - FindChildren(ctx context.Context, id int) ([]*Studio, error) - FindByName(ctx context.Context, name string, nocase bool) (*Studio, error) - FindByStashID(ctx context.Context, stashID StashID) ([]*Studio, error) - FindByStashIDStatus(ctx context.Context, hasStashID bool, stashboxEndpoint string) ([]*Studio, error) - Count(ctx context.Context) (int, error) - All(ctx context.Context) ([]*Studio, error) - // TODO - this interface is temporary until the filter schema can fully - // support the query needed - QueryForAutoTag(ctx context.Context, words []string) ([]*Studio, error) - Query(ctx context.Context, studioFilter *StudioFilterType, findFilter *FindFilterType) ([]*Studio, int, error) - GetImage(ctx context.Context, studioID int) ([]byte, error) - HasImage(ctx context.Context, studioID int) (bool, error) - AliasLoader - StashIDLoader -} - -type StudioWriter interface { - Create(ctx context.Context, newStudio *Studio) error - UpdatePartial(ctx context.Context, input StudioPartial) (*Studio, error) - Update(ctx context.Context, updatedStudio *Studio) error - Destroy(ctx context.Context, id int) error - UpdateImage(ctx context.Context, studioID int, image []byte) error -} - -type StudioReaderWriter interface { - StudioReader - StudioWriter -} diff --git a/pkg/models/tag.go b/pkg/models/tag.go index b8b8d78f98d..b2cff5a0ebc 100644 --- a/pkg/models/tag.go +++ 
b/pkg/models/tag.go @@ -1,7 +1,5 @@ package models -import "context" - type TagFilterType struct { And *TagFilterType `json:"AND"` Or *TagFilterType `json:"OR"` @@ -39,51 +37,3 @@ type TagFilterType struct { // Filter by updated at UpdatedAt *TimestampCriterionInput `json:"updated_at"` } - -type TagFinder interface { - FindMany(ctx context.Context, ids []int) ([]*Tag, error) -} - -type TagReader interface { - Find(ctx context.Context, id int) (*Tag, error) - TagFinder - FindBySceneID(ctx context.Context, sceneID int) ([]*Tag, error) - FindByPerformerID(ctx context.Context, performerID int) ([]*Tag, error) - FindBySceneMarkerID(ctx context.Context, sceneMarkerID int) ([]*Tag, error) - FindByImageID(ctx context.Context, imageID int) ([]*Tag, error) - FindByGalleryID(ctx context.Context, galleryID int) ([]*Tag, error) - FindByName(ctx context.Context, name string, nocase bool) (*Tag, error) - FindByNames(ctx context.Context, names []string, nocase bool) ([]*Tag, error) - FindByParentTagID(ctx context.Context, parentID int) ([]*Tag, error) - FindByChildTagID(ctx context.Context, childID int) ([]*Tag, error) - Count(ctx context.Context) (int, error) - CountByParentTagID(ctx context.Context, parentID int) (int, error) - CountByChildTagID(ctx context.Context, childID int) (int, error) - All(ctx context.Context) ([]*Tag, error) - // TODO - this interface is temporary until the filter schema can fully - // support the query needed - QueryForAutoTag(ctx context.Context, words []string) ([]*Tag, error) - Query(ctx context.Context, tagFilter *TagFilterType, findFilter *FindFilterType) ([]*Tag, int, error) - GetImage(ctx context.Context, tagID int) ([]byte, error) - HasImage(ctx context.Context, tagID int) (bool, error) - GetAliases(ctx context.Context, tagID int) ([]string, error) - FindAllAncestors(ctx context.Context, tagID int, excludeIDs []int) ([]*TagPath, error) - FindAllDescendants(ctx context.Context, tagID int, excludeIDs []int) ([]*TagPath, error) -} - -type 
TagWriter interface { - Create(ctx context.Context, newTag *Tag) error - UpdatePartial(ctx context.Context, id int, updateTag TagPartial) (*Tag, error) - Update(ctx context.Context, updatedTag *Tag) error - Destroy(ctx context.Context, id int) error - UpdateImage(ctx context.Context, tagID int, image []byte) error - UpdateAliases(ctx context.Context, tagID int, aliases []string) error - Merge(ctx context.Context, source []int, destination int) error - UpdateParentTags(ctx context.Context, tagID int, parentIDs []int) error - UpdateChildTags(ctx context.Context, tagID int, parentIDs []int) error -} - -type TagReaderWriter interface { - TagReader - TagWriter -} diff --git a/pkg/movie/export.go b/pkg/movie/export.go index 09963ce5e87..5a6c49aa364 100644 --- a/pkg/movie/export.go +++ b/pkg/movie/export.go @@ -8,7 +8,6 @@ import ( "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/json" "github.com/stashapp/stash/pkg/models/jsonschema" - "github.com/stashapp/stash/pkg/studio" "github.com/stashapp/stash/pkg/utils" ) @@ -18,7 +17,7 @@ type ImageGetter interface { } // ToJSON converts a Movie into its JSON equivalent. 
-func ToJSON(ctx context.Context, reader ImageGetter, studioReader studio.Finder, movie *models.Movie) (*jsonschema.Movie, error) { +func ToJSON(ctx context.Context, reader ImageGetter, studioReader models.StudioGetter, movie *models.Movie) (*jsonschema.Movie, error) { newMovieJSON := jsonschema.Movie{ Name: movie.Name, Aliases: movie.Aliases, diff --git a/pkg/movie/import.go b/pkg/movie/import.go index 75e08b0bb1f..e231031e865 100644 --- a/pkg/movie/import.go +++ b/pkg/movie/import.go @@ -6,24 +6,17 @@ import ( "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/jsonschema" - "github.com/stashapp/stash/pkg/studio" "github.com/stashapp/stash/pkg/utils" ) -type ImageUpdater interface { - UpdateFrontImage(ctx context.Context, movieID int, frontImage []byte) error - UpdateBackImage(ctx context.Context, movieID int, backImage []byte) error -} - -type NameFinderCreatorUpdater interface { - NameFinderCreator - Update(ctx context.Context, updatedMovie *models.Movie) error - ImageUpdater +type ImporterReaderWriter interface { + models.MovieCreatorUpdater + FindByName(ctx context.Context, name string, nocase bool) (*models.Movie, error) } type Importer struct { - ReaderWriter NameFinderCreatorUpdater - StudioWriter studio.NameFinderCreator + ReaderWriter ImporterReaderWriter + StudioWriter models.StudioFinderCreator Input jsonschema.Movie MissingRefBehaviour models.ImportMissingRefEnum diff --git a/pkg/movie/query.go b/pkg/movie/query.go index 3736f943798..3fac932a03d 100644 --- a/pkg/movie/query.go +++ b/pkg/movie/query.go @@ -7,15 +7,7 @@ import ( "github.com/stashapp/stash/pkg/models" ) -type Queryer interface { - Query(ctx context.Context, movieFilter *models.MovieFilterType, findFilter *models.FindFilterType) ([]*models.Movie, int, error) -} - -type CountQueryer interface { - QueryCount(ctx context.Context, movieFilter *models.MovieFilterType, findFilter *models.FindFilterType) (int, error) -} - -func CountByStudioID(ctx context.Context, r 
CountQueryer, id int, depth *int) (int, error) { +func CountByStudioID(ctx context.Context, r models.MovieQueryer, id int, depth *int) (int, error) { filter := &models.MovieFilterType{ Studios: &models.HierarchicalMultiCriterionInput{ Value: []string{strconv.Itoa(id)}, diff --git a/pkg/movie/update.go b/pkg/movie/update.go deleted file mode 100644 index 4111215e232..00000000000 --- a/pkg/movie/update.go +++ /dev/null @@ -1,12 +0,0 @@ -package movie - -import ( - "context" - - "github.com/stashapp/stash/pkg/models" -) - -type NameFinderCreator interface { - FindByName(ctx context.Context, name string, nocase bool) (*models.Movie, error) - Create(ctx context.Context, newMovie *models.Movie) error -} diff --git a/pkg/performer/import.go b/pkg/performer/import.go index f84030a6ed7..1c3c075a447 100644 --- a/pkg/performer/import.go +++ b/pkg/performer/import.go @@ -10,19 +10,17 @@ import ( "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/jsonschema" "github.com/stashapp/stash/pkg/sliceutil/stringslice" - "github.com/stashapp/stash/pkg/tag" "github.com/stashapp/stash/pkg/utils" ) -type NameFinderCreatorUpdater interface { - NameFinderCreator - Update(ctx context.Context, updatedPerformer *models.Performer) error - UpdateImage(ctx context.Context, performerID int, image []byte) error +type ImporterReaderWriter interface { + models.PerformerCreatorUpdater + models.PerformerQueryer } type Importer struct { - ReaderWriter NameFinderCreatorUpdater - TagWriter tag.NameFinderCreator + ReaderWriter ImporterReaderWriter + TagWriter models.TagFinderCreator Input jsonschema.Performer MissingRefBehaviour models.ImportMissingRefEnum @@ -65,7 +63,7 @@ func (i *Importer) populateTags(ctx context.Context) error { return nil } -func importTags(ctx context.Context, tagWriter tag.NameFinderCreator, names []string, missingRefBehaviour models.ImportMissingRefEnum) ([]*models.Tag, error) { +func importTags(ctx context.Context, tagWriter models.TagFinderCreator, 
names []string, missingRefBehaviour models.ImportMissingRefEnum) ([]*models.Tag, error) { tags, err := tagWriter.FindByNames(ctx, names, false) if err != nil { return nil, err @@ -100,7 +98,7 @@ func importTags(ctx context.Context, tagWriter tag.NameFinderCreator, names []st return tags, nil } -func createTags(ctx context.Context, tagWriter tag.NameFinderCreator, names []string) ([]*models.Tag, error) { +func createTags(ctx context.Context, tagWriter models.TagFinderCreator, names []string) ([]*models.Tag, error) { var ret []*models.Tag for _, name := range names { newTag := models.NewTag(name) diff --git a/pkg/performer/query.go b/pkg/performer/query.go index b8df03a1c51..d85fa514866 100644 --- a/pkg/performer/query.go +++ b/pkg/performer/query.go @@ -7,15 +7,7 @@ import ( "github.com/stashapp/stash/pkg/models" ) -type Queryer interface { - Query(ctx context.Context, performerFilter *models.PerformerFilterType, findFilter *models.FindFilterType) ([]*models.Performer, int, error) -} - -type CountQueryer interface { - QueryCount(ctx context.Context, galleryFilter *models.PerformerFilterType, findFilter *models.FindFilterType) (int, error) -} - -func CountByStudioID(ctx context.Context, r CountQueryer, id int, depth *int) (int, error) { +func CountByStudioID(ctx context.Context, r models.PerformerQueryer, id int, depth *int) (int, error) { filter := &models.PerformerFilterType{ Studios: &models.HierarchicalMultiCriterionInput{ Value: []string{strconv.Itoa(id)}, @@ -27,7 +19,7 @@ func CountByStudioID(ctx context.Context, r CountQueryer, id int, depth *int) (i return r.QueryCount(ctx, filter, nil) } -func CountByTagID(ctx context.Context, r CountQueryer, id int, depth *int) (int, error) { +func CountByTagID(ctx context.Context, r models.PerformerQueryer, id int, depth *int) (int, error) { filter := &models.PerformerFilterType{ Tags: &models.HierarchicalMultiCriterionInput{ Value: []string{strconv.Itoa(id)}, @@ -39,7 +31,7 @@ func CountByTagID(ctx context.Context, r 
CountQueryer, id int, depth *int) (int, return r.QueryCount(ctx, filter, nil) } -func CountByAppearsWith(ctx context.Context, r CountQueryer, id int) (int, error) { +func CountByAppearsWith(ctx context.Context, r models.PerformerQueryer, id int) (int, error) { filter := &models.PerformerFilterType{ Performers: &models.MultiCriterionInput{ Value: []string{strconv.Itoa(id)}, diff --git a/pkg/performer/update.go b/pkg/performer/update.go deleted file mode 100644 index d846eb6ce93..00000000000 --- a/pkg/performer/update.go +++ /dev/null @@ -1,13 +0,0 @@ -package performer - -import ( - "context" - - "github.com/stashapp/stash/pkg/models" -) - -type NameFinderCreator interface { - FindByNames(ctx context.Context, names []string, nocase bool) ([]*models.Performer, error) - Query(ctx context.Context, performerFilter *models.PerformerFilterType, findFilter *models.FindFilterType) ([]*models.Performer, int, error) - Create(ctx context.Context, newPerformer *models.Performer) error -} diff --git a/pkg/scene/create.go b/pkg/scene/create.go index c2345d2ef95..428c636a771 100644 --- a/pkg/scene/create.go +++ b/pkg/scene/create.go @@ -6,12 +6,11 @@ import ( "fmt" "time" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/plugin" ) -func (s *Service) Create(ctx context.Context, input *models.Scene, fileIDs []file.ID, coverImage []byte) (*models.Scene, error) { +func (s *Service) Create(ctx context.Context, input *models.Scene, fileIDs []models.FileID, coverImage []byte) (*models.Scene, error) { // title must be set if no files are provided if input.Title == "" && len(fileIDs) == 0 { return nil, errors.New("title must be set if scene has no files") diff --git a/pkg/scene/delete.go b/pkg/scene/delete.go index c7e8fdcc4be..7426c390b4b 100644 --- a/pkg/scene/delete.go +++ b/pkg/scene/delete.go @@ -105,15 +105,6 @@ func (d *FileDeleter) MarkMarkerFiles(scene *models.Scene, seconds int) error { return d.Files(files) } -type 
Destroyer interface { - Destroy(ctx context.Context, id int) error -} - -type MarkerDestroyer interface { - FindBySceneID(ctx context.Context, sceneID int) ([]*models.SceneMarker, error) - Destroy(ctx context.Context, id int) error -} - // Destroy deletes a scene and its associated relationships from the // database. func (s *Service) Destroy(ctx context.Context, scene *models.Scene, fileDeleter *FileDeleter, deleteGenerated, deleteFile bool) error { @@ -190,7 +181,7 @@ func (s *Service) deleteFiles(ctx context.Context, scene *models.Scene, fileDele // DestroyMarker deletes the scene marker from the database and returns a // function that removes the generated files, to be executed after the // transaction is successfully committed. -func DestroyMarker(ctx context.Context, scene *models.Scene, sceneMarker *models.SceneMarker, qb MarkerDestroyer, fileDeleter *FileDeleter) error { +func DestroyMarker(ctx context.Context, scene *models.Scene, sceneMarker *models.SceneMarker, qb models.SceneMarkerDestroyer, fileDeleter *FileDeleter) error { if err := qb.Destroy(ctx, sceneMarker.ID); err != nil { return err } diff --git a/pkg/scene/export.go b/pkg/scene/export.go index 5fa3b8b2df5..90419e2c4d7 100644 --- a/pkg/scene/export.go +++ b/pkg/scene/export.go @@ -11,8 +11,6 @@ import ( "github.com/stashapp/stash/pkg/models/json" "github.com/stashapp/stash/pkg/models/jsonschema" "github.com/stashapp/stash/pkg/sliceutil/intslice" - "github.com/stashapp/stash/pkg/studio" - "github.com/stashapp/stash/pkg/tag" "github.com/stashapp/stash/pkg/utils" ) @@ -20,18 +18,10 @@ type CoverGetter interface { GetCover(ctx context.Context, sceneID int) ([]byte, error) } -type MarkerTagFinder interface { - tag.Finder - TagFinder - FindBySceneMarkerID(ctx context.Context, sceneMarkerID int) ([]*models.Tag, error) -} - -type MarkerFinder interface { - FindBySceneID(ctx context.Context, sceneID int) ([]*models.SceneMarker, error) -} - type TagFinder interface { + models.TagGetter FindBySceneID(ctx 
context.Context, sceneID int) ([]*models.Tag, error) + FindBySceneMarkerID(ctx context.Context, sceneMarkerID int) ([]*models.Tag, error) } // ToBasicJSON converts a scene object into its JSON object equivalent. It @@ -88,7 +78,7 @@ func ToBasicJSON(ctx context.Context, reader CoverGetter, scene *models.Scene) ( // GetStudioName returns the name of the provided scene's studio. It returns an // empty string if there is no studio assigned to the scene. -func GetStudioName(ctx context.Context, reader studio.Finder, scene *models.Scene) (string, error) { +func GetStudioName(ctx context.Context, reader models.StudioGetter, scene *models.Scene) (string, error) { if scene.StudioID != nil { studio, err := reader.Find(ctx, *scene.StudioID) if err != nil { @@ -126,7 +116,7 @@ func getTagNames(tags []*models.Tag) []string { } // GetDependentTagIDs returns a slice of unique tag IDs that this scene references. -func GetDependentTagIDs(ctx context.Context, tags MarkerTagFinder, markerReader MarkerFinder, scene *models.Scene) ([]int, error) { +func GetDependentTagIDs(ctx context.Context, tags TagFinder, markerReader models.SceneMarkerFinder, scene *models.Scene) ([]int, error) { var ret []int t, err := tags.FindBySceneID(ctx, scene.ID) @@ -158,13 +148,9 @@ func GetDependentTagIDs(ctx context.Context, tags MarkerTagFinder, markerReader return ret, nil } -type MovieFinder interface { - Find(ctx context.Context, id int) (*models.Movie, error) -} - // GetSceneMoviesJSON returns a slice of SceneMovie JSON representation objects // corresponding to the provided scene's scene movie relationships. 
-func GetSceneMoviesJSON(ctx context.Context, movieReader MovieFinder, scene *models.Scene) ([]jsonschema.SceneMovie, error) { +func GetSceneMoviesJSON(ctx context.Context, movieReader models.MovieGetter, scene *models.Scene) ([]jsonschema.SceneMovie, error) { sceneMovies := scene.Movies.List() var results []jsonschema.SceneMovie @@ -202,7 +188,7 @@ func GetDependentMovieIDs(ctx context.Context, scene *models.Scene) ([]int, erro // GetSceneMarkersJSON returns a slice of SceneMarker JSON representation // objects corresponding to the provided scene's markers. -func GetSceneMarkersJSON(ctx context.Context, markerReader MarkerFinder, tagReader MarkerTagFinder, scene *models.Scene) ([]jsonschema.SceneMarker, error) { +func GetSceneMarkersJSON(ctx context.Context, markerReader models.SceneMarkerFinder, tagReader TagFinder, scene *models.Scene) ([]jsonschema.SceneMarker, error) { sceneMarkers, err := markerReader.FindBySceneID(ctx, scene.ID) if err != nil { return nil, fmt.Errorf("error getting scene markers: %v", err) diff --git a/pkg/scene/export_test.go b/pkg/scene/export_test.go index 85a63aa5518..19e12ecea70 100644 --- a/pkg/scene/export_test.go +++ b/pkg/scene/export_test.go @@ -3,7 +3,6 @@ package scene import ( "errors" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/json" "github.com/stashapp/stash/pkg/models/jsonschema" @@ -93,9 +92,9 @@ func createFullScene(id int) models.Scene { Rating: &rating, Organized: organized, URLs: models.NewRelatedStrings([]string{url}), - Files: models.NewRelatedVideoFiles([]*file.VideoFile{ + Files: models.NewRelatedVideoFiles([]*models.VideoFile{ { - BaseFile: &file.BaseFile{ + BaseFile: &models.BaseFile{ Path: path, }, }, @@ -111,9 +110,9 @@ func createFullScene(id int) models.Scene { func createEmptyScene(id int) models.Scene { return models.Scene{ ID: id, - Files: models.NewRelatedVideoFiles([]*file.VideoFile{ + Files: 
models.NewRelatedVideoFiles([]*models.VideoFile{ { - BaseFile: &file.BaseFile{ + BaseFile: &models.BaseFile{ Path: path, }, }, diff --git a/pkg/scene/filename_parser.go b/pkg/scene/filename_parser.go index 3dfab35384b..b7c38863e54 100644 --- a/pkg/scene/filename_parser.go +++ b/pkg/scene/filename_parser.go @@ -450,11 +450,11 @@ func (p *FilenameParser) initWhiteSpaceRegex() { } type FilenameParserRepository struct { - Scene Queryer + Scene models.SceneQueryer Performer PerformerNamesFinder - Studio studio.Queryer + Studio models.StudioQueryer Movie MovieNameFinder - Tag tag.Queryer + Tag models.TagQueryer } func (p *FilenameParser) Parse(ctx context.Context, repo FilenameParserRepository) ([]*models.SceneParserResult, int, error) { @@ -544,7 +544,7 @@ func (p *FilenameParser) queryPerformer(ctx context.Context, qb PerformerNamesFi return ret } -func (p *FilenameParser) queryStudio(ctx context.Context, qb studio.Queryer, studioName string) *models.Studio { +func (p *FilenameParser) queryStudio(ctx context.Context, qb models.StudioQueryer, studioName string) *models.Studio { // massage the performer name studioName = delimiterRE.ReplaceAllString(studioName, " ") @@ -587,7 +587,7 @@ func (p *FilenameParser) queryMovie(ctx context.Context, qb MovieNameFinder, mov return ret } -func (p *FilenameParser) queryTag(ctx context.Context, qb tag.Queryer, tagName string) *models.Tag { +func (p *FilenameParser) queryTag(ctx context.Context, qb models.TagQueryer, tagName string) *models.Tag { // massage the tag name tagName = delimiterRE.ReplaceAllString(tagName, " ") @@ -626,7 +626,7 @@ func (p *FilenameParser) setPerformers(ctx context.Context, qb PerformerNamesFin } } -func (p *FilenameParser) setTags(ctx context.Context, qb tag.Queryer, h sceneHolder, result *models.SceneParserResult) { +func (p *FilenameParser) setTags(ctx context.Context, qb models.TagQueryer, h sceneHolder, result *models.SceneParserResult) { // query for each performer tagsSet := make(map[int]bool) for _, 
tagName := range h.tags { @@ -642,7 +642,7 @@ func (p *FilenameParser) setTags(ctx context.Context, qb tag.Queryer, h sceneHol } } -func (p *FilenameParser) setStudio(ctx context.Context, qb studio.Queryer, h sceneHolder, result *models.SceneParserResult) { +func (p *FilenameParser) setStudio(ctx context.Context, qb models.StudioQueryer, h sceneHolder, result *models.SceneParserResult) { // query for each performer if h.studio != "" { studio := p.queryStudio(ctx, qb, h.studio) diff --git a/pkg/scene/hash.go b/pkg/scene/hash.go index 4b06a73ef9c..efa9c0fd3f1 100644 --- a/pkg/scene/hash.go +++ b/pkg/scene/hash.go @@ -1,18 +1,17 @@ package scene import ( - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" ) // GetHash returns the hash of the file, based on the hash algorithm provided. If // hash algorithm is MD5, then Checksum is returned. Otherwise, OSHash is returned. -func GetHash(f file.File, hashAlgorithm models.HashAlgorithm) string { +func GetHash(f models.File, hashAlgorithm models.HashAlgorithm) string { switch hashAlgorithm { case models.HashAlgorithmMd5: - return f.Base().Fingerprints.GetString(file.FingerprintTypeMD5) + return f.Base().Fingerprints.GetString(models.FingerprintTypeMD5) case models.HashAlgorithmOshash: - return f.Base().Fingerprints.GetString(file.FingerprintTypeOshash) + return f.Base().Fingerprints.GetString(models.FingerprintTypeOshash) default: panic("unknown hash algorithm") } diff --git a/pkg/scene/import.go b/pkg/scene/import.go index 2d73c0f2cb0..e2cfe8abaff 100644 --- a/pkg/scene/import.go +++ b/pkg/scene/import.go @@ -5,32 +5,25 @@ import ( "fmt" "strings" - "github.com/stashapp/stash/pkg/file" - "github.com/stashapp/stash/pkg/gallery" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/jsonschema" - "github.com/stashapp/stash/pkg/movie" - "github.com/stashapp/stash/pkg/performer" "github.com/stashapp/stash/pkg/sliceutil/stringslice" - "github.com/stashapp/stash/pkg/studio" - 
"github.com/stashapp/stash/pkg/tag" "github.com/stashapp/stash/pkg/utils" ) -type FullCreatorUpdater interface { - CreatorUpdater - Update(ctx context.Context, updatedScene *models.Scene) error - Updater +type ImporterReaderWriter interface { + models.SceneCreatorUpdater + FindByFileID(ctx context.Context, fileID models.FileID) ([]*models.Scene, error) } type Importer struct { - ReaderWriter FullCreatorUpdater - FileFinder file.Getter - StudioWriter studio.NameFinderCreator - GalleryFinder gallery.Finder - PerformerWriter performer.NameFinderCreator - MovieWriter movie.NameFinderCreator - TagWriter tag.NameFinderCreator + ReaderWriter ImporterReaderWriter + FileFinder models.FileFinder + StudioWriter models.StudioFinderCreator + GalleryFinder models.GalleryFinder + PerformerWriter models.PerformerFinderCreator + MovieWriter models.MovieFinderCreator + TagWriter models.TagFinderCreator Input jsonschema.Scene MissingRefBehaviour models.ImportMissingRefEnum FileNamingAlgorithm models.HashAlgorithm @@ -123,7 +116,7 @@ func (i *Importer) sceneJSONToScene(sceneJSON jsonschema.Scene) models.Scene { } func (i *Importer) populateFiles(ctx context.Context) error { - files := make([]*file.VideoFile, 0) + files := make([]*models.VideoFile, 0) for _, ref := range i.Input.Files { path := ref @@ -135,7 +128,7 @@ func (i *Importer) populateFiles(ctx context.Context) error { if f == nil { return fmt.Errorf("scene file '%s' not found", path) } else { - files = append(files, f.(*file.VideoFile)) + files = append(files, f.(*models.VideoFile)) } } @@ -413,7 +406,7 @@ func (i *Importer) FindExistingID(ctx context.Context) (*int, error) { } func (i *Importer) Create(ctx context.Context) (*int, error) { - var fileIDs []file.ID + var fileIDs []models.FileID for _, f := range i.scene.Files.List() { fileIDs = append(fileIDs, f.Base().ID) } @@ -437,7 +430,7 @@ func (i *Importer) Update(ctx context.Context, id int) error { return nil } -func importTags(ctx context.Context, tagWriter 
tag.NameFinderCreator, names []string, missingRefBehaviour models.ImportMissingRefEnum) ([]*models.Tag, error) { +func importTags(ctx context.Context, tagWriter models.TagFinderCreator, names []string, missingRefBehaviour models.ImportMissingRefEnum) ([]*models.Tag, error) { tags, err := tagWriter.FindByNames(ctx, names, false) if err != nil { return nil, err @@ -472,7 +465,7 @@ func importTags(ctx context.Context, tagWriter tag.NameFinderCreator, names []st return tags, nil } -func createTags(ctx context.Context, tagWriter tag.NameFinderCreator, names []string) ([]*models.Tag, error) { +func createTags(ctx context.Context, tagWriter models.TagCreator, names []string) ([]*models.Tag, error) { var ret []*models.Tag for _, name := range names { newTag := models.NewTag(name) diff --git a/pkg/scene/marker_import.go b/pkg/scene/marker_import.go index 20127cbf8db..33937af7e10 100644 --- a/pkg/scene/marker_import.go +++ b/pkg/scene/marker_import.go @@ -7,20 +7,17 @@ import ( "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/jsonschema" - "github.com/stashapp/stash/pkg/tag" ) type MarkerCreatorUpdater interface { - Create(ctx context.Context, newSceneMarker *models.SceneMarker) error - Update(ctx context.Context, updatedSceneMarker *models.SceneMarker) error + models.SceneMarkerCreatorUpdater FindBySceneID(ctx context.Context, sceneID int) ([]*models.SceneMarker, error) - UpdateTags(ctx context.Context, markerID int, tagIDs []int) error } type MarkerImporter struct { SceneID int ReaderWriter MarkerCreatorUpdater - TagWriter tag.NameFinderCreator + TagWriter models.TagFinderCreator Input jsonschema.SceneMarker MissingRefBehaviour models.ImportMissingRefEnum diff --git a/pkg/scene/marker_query.go b/pkg/scene/marker_query.go index e4ae5b6dfae..d9cd311a78b 100644 --- a/pkg/scene/marker_query.go +++ b/pkg/scene/marker_query.go @@ -7,15 +7,7 @@ import ( "github.com/stashapp/stash/pkg/models" ) -type MarkerQueryer interface { - Query(ctx 
context.Context, sceneMarkerFilter *models.SceneMarkerFilterType, findFilter *models.FindFilterType) ([]*models.SceneMarker, int, error) -} - -type MarkerCountQueryer interface { - QueryCount(ctx context.Context, sceneMarkerFilter *models.SceneMarkerFilterType, findFilter *models.FindFilterType) (int, error) -} - -func MarkerCountByTagID(ctx context.Context, r MarkerCountQueryer, id int, depth *int) (int, error) { +func MarkerCountByTagID(ctx context.Context, r models.SceneMarkerQueryer, id int, depth *int) (int, error) { filter := &models.SceneMarkerFilterType{ Tags: &models.HierarchicalMultiCriterionInput{ Value: []string{strconv.Itoa(id)}, diff --git a/pkg/scene/merge.go b/pkg/scene/merge.go index ed660d83e2b..8934f5515a8 100644 --- a/pkg/scene/merge.go +++ b/pkg/scene/merge.go @@ -6,7 +6,6 @@ import ( "fmt" "os" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/fsutil" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" @@ -33,7 +32,7 @@ func (s *Service) Merge(ctx context.Context, sourceIDs []int, destinationID int, return fmt.Errorf("finding source scenes: %w", err) } - var fileIDs []file.ID + var fileIDs []models.FileID for _, src := range sources { // TODO - delete generated files as needed diff --git a/pkg/scene/migrate_screenshots.go b/pkg/scene/migrate_screenshots.go index 94d73643f07..59eade29957 100644 --- a/pkg/scene/migrate_screenshots.go +++ b/pkg/scene/migrate_screenshots.go @@ -20,7 +20,8 @@ type MigrateSceneScreenshotsInput struct { type HashFinderCoverUpdater interface { FindByChecksum(ctx context.Context, checksum string) ([]*models.Scene, error) FindByOSHash(ctx context.Context, oshash string) ([]*models.Scene, error) - CoverUpdater + HasCover(ctx context.Context, sceneID int) (bool, error) + UpdateCover(ctx context.Context, sceneID int, cover []byte) error } type ScreenshotMigrator struct { diff --git a/pkg/scene/query.go b/pkg/scene/query.go index 3dc7524ed90..a8b1993a6a0 100644 --- 
a/pkg/scene/query.go +++ b/pkg/scene/query.go @@ -11,19 +11,6 @@ import ( "github.com/stashapp/stash/pkg/models" ) -type Queryer interface { - Query(ctx context.Context, options models.SceneQueryOptions) (*models.SceneQueryResult, error) -} - -type CountQueryer interface { - QueryCount(ctx context.Context, sceneFilter *models.SceneFilterType, findFilter *models.FindFilterType) (int, error) -} - -type IDFinder interface { - Find(ctx context.Context, id int) (*models.Scene, error) - FindMany(ctx context.Context, ids []int) ([]*models.Scene, error) -} - // QueryOptions returns a SceneQueryOptions populated with the provided filters. func QueryOptions(sceneFilter *models.SceneFilterType, findFilter *models.FindFilterType, count bool) models.SceneQueryOptions { return models.SceneQueryOptions{ @@ -36,7 +23,7 @@ func QueryOptions(sceneFilter *models.SceneFilterType, findFilter *models.FindFi } // QueryWithCount queries for scenes, returning the scene objects and the total count. -func QueryWithCount(ctx context.Context, qb Queryer, sceneFilter *models.SceneFilterType, findFilter *models.FindFilterType) ([]*models.Scene, int, error) { +func QueryWithCount(ctx context.Context, qb models.SceneQueryer, sceneFilter *models.SceneFilterType, findFilter *models.FindFilterType) ([]*models.Scene, int, error) { // this was moved from the queryBuilder code // left here so that calling functions can reference this instead result, err := qb.Query(ctx, QueryOptions(sceneFilter, findFilter, true)) @@ -53,7 +40,7 @@ func QueryWithCount(ctx context.Context, qb Queryer, sceneFilter *models.SceneFi } // Query queries for scenes using the provided filters. 
-func Query(ctx context.Context, qb Queryer, sceneFilter *models.SceneFilterType, findFilter *models.FindFilterType) ([]*models.Scene, error) { +func Query(ctx context.Context, qb models.SceneQueryer, sceneFilter *models.SceneFilterType, findFilter *models.FindFilterType) ([]*models.Scene, error) { result, err := qb.Query(ctx, QueryOptions(sceneFilter, findFilter, false)) if err != nil { return nil, err @@ -67,7 +54,7 @@ func Query(ctx context.Context, qb Queryer, sceneFilter *models.SceneFilterType, return scenes, nil } -func BatchProcess(ctx context.Context, reader Queryer, sceneFilter *models.SceneFilterType, findFilter *models.FindFilterType, fn func(scene *models.Scene) error) error { +func BatchProcess(ctx context.Context, reader models.SceneQueryer, sceneFilter *models.SceneFilterType, findFilter *models.FindFilterType, fn func(scene *models.Scene) error) error { const batchSize = 1000 if findFilter == nil { @@ -134,7 +121,7 @@ func FilterFromPaths(paths []string) *models.SceneFilterType { return ret } -func CountByStudioID(ctx context.Context, r CountQueryer, id int, depth *int) (int, error) { +func CountByStudioID(ctx context.Context, r models.SceneQueryer, id int, depth *int) (int, error) { filter := &models.SceneFilterType{ Studios: &models.HierarchicalMultiCriterionInput{ Value: []string{strconv.Itoa(id)}, @@ -146,7 +133,7 @@ func CountByStudioID(ctx context.Context, r CountQueryer, id int, depth *int) (i return r.QueryCount(ctx, filter, nil) } -func CountByTagID(ctx context.Context, r CountQueryer, id int, depth *int) (int, error) { +func CountByTagID(ctx context.Context, r models.SceneQueryer, id int, depth *int) (int, error) { filter := &models.SceneFilterType{ Tags: &models.HierarchicalMultiCriterionInput{ Value: []string{strconv.Itoa(id)}, diff --git a/pkg/scene/scan.go b/pkg/scene/scan.go index 5ccdee25601..f16d0d5c61b 100644 --- a/pkg/scene/scan.go +++ b/pkg/scene/scan.go @@ -6,7 +6,6 @@ import ( "fmt" "time" - 
"github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/file/video" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" @@ -19,21 +18,22 @@ var ( ErrNotVideoFile = errors.New("not a video file") ) -type CreatorUpdater interface { - FindByFileID(ctx context.Context, fileID file.ID) ([]*models.Scene, error) - FindByFingerprints(ctx context.Context, fp []file.Fingerprint) ([]*models.Scene, error) - Creator +type ScanCreatorUpdater interface { + FindByFileID(ctx context.Context, fileID models.FileID) ([]*models.Scene, error) + FindByFingerprints(ctx context.Context, fp []models.Fingerprint) ([]*models.Scene, error) + GetFiles(ctx context.Context, relatedID int) ([]*models.VideoFile, error) + + Create(ctx context.Context, newScene *models.Scene, fileIDs []models.FileID) error UpdatePartial(ctx context.Context, id int, updatedScene models.ScenePartial) (*models.Scene, error) - AddFileID(ctx context.Context, id int, fileID file.ID) error - models.VideoFileLoader + AddFileID(ctx context.Context, id int, fileID models.FileID) error } type ScanGenerator interface { - Generate(ctx context.Context, s *models.Scene, f *file.VideoFile) error + Generate(ctx context.Context, s *models.Scene, f *models.VideoFile) error } type ScanHandler struct { - CreatorUpdater CreatorUpdater + CreatorUpdater ScanCreatorUpdater ScanGenerator ScanGenerator CaptionUpdater video.CaptionUpdater @@ -63,12 +63,12 @@ func (h *ScanHandler) validate() error { return nil } -func (h *ScanHandler) Handle(ctx context.Context, f file.File, oldFile file.File) error { +func (h *ScanHandler) Handle(ctx context.Context, f models.File, oldFile models.File) error { if err := h.validate(); err != nil { return err } - videoFile, ok := f.(*file.VideoFile) + videoFile, ok := f.(*models.VideoFile) if !ok { return ErrNotVideoFile } @@ -108,7 +108,7 @@ func (h *ScanHandler) Handle(ctx context.Context, f file.File, oldFile file.File logger.Infof("%s doesn't exist. 
Creating new scene...", f.Base().Path) - if err := h.CreatorUpdater.Create(ctx, newScene, []file.ID{videoFile.ID}); err != nil { + if err := h.CreatorUpdater.Create(ctx, newScene, []models.FileID{videoFile.ID}); err != nil { return fmt.Errorf("creating new scene: %w", err) } @@ -140,7 +140,7 @@ func (h *ScanHandler) Handle(ctx context.Context, f file.File, oldFile file.File return nil } -func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models.Scene, f *file.VideoFile, updateExisting bool) error { +func (h *ScanHandler) associateExisting(ctx context.Context, existing []*models.Scene, f *models.VideoFile, updateExisting bool) error { for _, s := range existing { if err := s.LoadFiles(ctx, h.CreatorUpdater); err != nil { return err diff --git a/pkg/scene/service.go b/pkg/scene/service.go index f7b51ce1e95..05fa9f532eb 100644 --- a/pkg/scene/service.go +++ b/pkg/scene/service.go @@ -1,58 +1,19 @@ package scene import ( - "context" - - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/models/paths" "github.com/stashapp/stash/pkg/plugin" ) -type FinderByFile interface { - FindByFileID(ctx context.Context, fileID file.ID) ([]*models.Scene, error) -} - -type FileAssigner interface { - AssignFiles(ctx context.Context, sceneID int, fileID []file.ID) error -} - -type Creator interface { - Create(ctx context.Context, newScene *models.Scene, fileIDs []file.ID) error -} - -type CoverUpdater interface { - HasCover(ctx context.Context, sceneID int) (bool, error) - UpdateCover(ctx context.Context, sceneID int, cover []byte) error -} - type Config interface { GetVideoFileNamingAlgorithm() models.HashAlgorithm } -type Repository interface { - IDFinder - FinderByFile - Creator - PartialUpdater - Destroyer - models.VideoFileLoader - FileAssigner - CoverUpdater - models.SceneReader -} - -type MarkerRepository interface { - MarkerFinder - MarkerDestroyer - - Update(ctx context.Context, updatedObject 
*models.SceneMarker) error -} - type Service struct { - File file.Store - Repository Repository - MarkerRepository MarkerRepository + File models.FileReaderWriter + Repository models.SceneReaderWriter + MarkerRepository models.SceneMarkerReaderWriter PluginCache *plugin.Cache Paths *paths.Paths diff --git a/pkg/scene/update.go b/pkg/scene/update.go index e3f3e252bde..f0a1a030f83 100644 --- a/pkg/scene/update.go +++ b/pkg/scene/update.go @@ -6,20 +6,10 @@ import ( "fmt" "time" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/utils" ) -type Updater interface { - PartialUpdater - UpdateCover(ctx context.Context, sceneID int, cover []byte) error -} - -type PartialUpdater interface { - UpdatePartial(ctx context.Context, id int, updatedScene models.ScenePartial) (*models.Scene, error) -} - var ErrEmptyUpdater = errors.New("no fields have been set") // UpdateSet is used to update a scene and its relationships. @@ -46,7 +36,7 @@ func (u *UpdateSet) IsEmpty() bool { // Update updates a scene by updating the fields in the Partial field, then // updates non-nil relationships. Returns an error if there is no work to // be done. 
-func (u *UpdateSet) Update(ctx context.Context, qb Updater) (*models.Scene, error) { +func (u *UpdateSet) Update(ctx context.Context, qb models.SceneUpdater) (*models.Scene, error) { if u.IsEmpty() { return nil, ErrEmptyUpdater } @@ -83,7 +73,7 @@ func (u UpdateSet) UpdateInput() models.SceneUpdateInput { return ret } -func AddPerformer(ctx context.Context, qb PartialUpdater, o *models.Scene, performerID int) error { +func AddPerformer(ctx context.Context, qb models.SceneUpdater, o *models.Scene, performerID int) error { _, err := qb.UpdatePartial(ctx, o.ID, models.ScenePartial{ PerformerIDs: &models.UpdateIDs{ IDs: []int{performerID}, @@ -93,7 +83,7 @@ func AddPerformer(ctx context.Context, qb PartialUpdater, o *models.Scene, perfo return err } -func AddTag(ctx context.Context, qb PartialUpdater, o *models.Scene, tagID int) error { +func AddTag(ctx context.Context, qb models.SceneUpdater, o *models.Scene, tagID int) error { _, err := qb.UpdatePartial(ctx, o.ID, models.ScenePartial{ TagIDs: &models.UpdateIDs{ IDs: []int{tagID}, @@ -103,7 +93,7 @@ func AddTag(ctx context.Context, qb PartialUpdater, o *models.Scene, tagID int) return err } -func AddGallery(ctx context.Context, qb PartialUpdater, o *models.Scene, galleryID int) error { +func AddGallery(ctx context.Context, qb models.SceneUpdater, o *models.Scene, galleryID int) error { _, err := qb.UpdatePartial(ctx, o.ID, models.ScenePartial{ TagIDs: &models.UpdateIDs{ IDs: []int{galleryID}, @@ -113,7 +103,7 @@ func AddGallery(ctx context.Context, qb PartialUpdater, o *models.Scene, gallery return err } -func (s *Service) AssignFile(ctx context.Context, sceneID int, fileID file.ID) error { +func (s *Service) AssignFile(ctx context.Context, sceneID int, fileID models.FileID) error { // ensure file isn't a primary file and that it is a video file f, err := s.File.Find(ctx, fileID) if err != nil { @@ -121,7 +111,7 @@ func (s *Service) AssignFile(ctx context.Context, sceneID int, fileID file.ID) e } ff := f[0] - if _, 
ok := ff.(*file.VideoFile); !ok { + if _, ok := ff.(*models.VideoFile); !ok { return fmt.Errorf("%s is not a video file", ff.Base().Path) } @@ -134,5 +124,5 @@ func (s *Service) AssignFile(ctx context.Context, sceneID int, fileID file.ID) e return errors.New("cannot reassign primary file") } - return s.Repository.AssignFiles(ctx, sceneID, []file.ID{fileID}) + return s.Repository.AssignFiles(ctx, sceneID, []models.FileID{fileID}) } diff --git a/pkg/scraper/autotag.go b/pkg/scraper/autotag.go index 6ba8b371d5c..5eb3922a804 100644 --- a/pkg/scraper/autotag.go +++ b/pkg/scraper/autotag.go @@ -20,14 +20,14 @@ const ( type autotagScraper struct { // repository models.Repository txnManager txn.Manager - performerReader match.PerformerAutoTagQueryer - studioReader match.StudioAutoTagQueryer - tagReader match.TagAutoTagQueryer + performerReader models.PerformerAutoTagQueryer + studioReader models.StudioAutoTagQueryer + tagReader models.TagAutoTagQueryer globalConfig GlobalConfig } -func autotagMatchPerformers(ctx context.Context, path string, performerReader match.PerformerAutoTagQueryer, trimExt bool) ([]*models.ScrapedPerformer, error) { +func autotagMatchPerformers(ctx context.Context, path string, performerReader models.PerformerAutoTagQueryer, trimExt bool) ([]*models.ScrapedPerformer, error) { p, err := match.PathToPerformers(ctx, path, performerReader, nil, trimExt) if err != nil { return nil, fmt.Errorf("error matching performers: %w", err) @@ -52,7 +52,7 @@ func autotagMatchPerformers(ctx context.Context, path string, performerReader ma return ret, nil } -func autotagMatchStudio(ctx context.Context, path string, studioReader match.StudioAutoTagQueryer, trimExt bool) (*models.ScrapedStudio, error) { +func autotagMatchStudio(ctx context.Context, path string, studioReader models.StudioAutoTagQueryer, trimExt bool) (*models.ScrapedStudio, error) { studio, err := match.PathToStudio(ctx, path, studioReader, nil, trimExt) if err != nil { return nil, fmt.Errorf("error 
matching studios: %w", err) @@ -69,7 +69,7 @@ func autotagMatchStudio(ctx context.Context, path string, studioReader match.Stu return nil, nil } -func autotagMatchTags(ctx context.Context, path string, tagReader match.TagAutoTagQueryer, trimExt bool) ([]*models.ScrapedTag, error) { +func autotagMatchTags(ctx context.Context, path string, tagReader models.TagAutoTagQueryer, trimExt bool) ([]*models.ScrapedTag, error) { t, err := match.PathToTags(ctx, path, tagReader, nil, trimExt) if err != nil { return nil, fmt.Errorf("error matching tags: %w", err) diff --git a/pkg/scraper/cache.go b/pkg/scraper/cache.go index d526ecb0a6b..c110944f624 100644 --- a/pkg/scraper/cache.go +++ b/pkg/scraper/cache.go @@ -15,8 +15,6 @@ import ( "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/match" "github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/scene" - "github.com/stashapp/stash/pkg/tag" "github.com/stashapp/stash/pkg/txn" ) @@ -53,27 +51,27 @@ func isCDPPathWS(c GlobalConfig) bool { } type SceneFinder interface { - scene.IDFinder + models.SceneGetter models.URLLoader } type PerformerFinder interface { - match.PerformerAutoTagQueryer + models.PerformerAutoTagQueryer match.PerformerFinder } type StudioFinder interface { - match.StudioAutoTagQueryer - match.StudioFinder + models.StudioAutoTagQueryer + FindByStashID(ctx context.Context, stashID models.StashID) ([]*models.Studio, error) } type TagFinder interface { - match.TagAutoTagQueryer - tag.Queryer + models.TagGetter + models.TagAutoTagQueryer } type GalleryFinder interface { - Find(ctx context.Context, id int) (*models.Gallery, error) + models.GalleryGetter models.FileLoader } diff --git a/pkg/scraper/postprocessing.go b/pkg/scraper/postprocessing.go index e2d404d7c19..e504e4d1cac 100644 --- a/pkg/scraper/postprocessing.go +++ b/pkg/scraper/postprocessing.go @@ -6,7 +6,6 @@ import ( "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/match" 
"github.com/stashapp/stash/pkg/models" - "github.com/stashapp/stash/pkg/tag" "github.com/stashapp/stash/pkg/txn" ) @@ -201,7 +200,7 @@ func (c Cache) postScrapeGallery(ctx context.Context, g ScrapedGallery) (Scraped return g, nil } -func postProcessTags(ctx context.Context, tqb tag.Queryer, scrapedTags []*models.ScrapedTag) ([]*models.ScrapedTag, error) { +func postProcessTags(ctx context.Context, tqb models.TagQueryer, scrapedTags []*models.ScrapedTag) ([]*models.ScrapedTag, error) { var ret []*models.ScrapedTag for _, t := range scrapedTags { diff --git a/pkg/scraper/stashbox/stash_box.go b/pkg/scraper/stashbox/stash_box.go index 6a5df09e9d6..7abff7032e2 100644 --- a/pkg/scraper/stashbox/stash_box.go +++ b/pkg/scraper/stashbox/stash_box.go @@ -19,41 +19,39 @@ import ( "github.com/Yamashou/gqlgenc/graphqljson" "github.com/gofrs/uuid" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/match" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/scraper" "github.com/stashapp/stash/pkg/scraper/stashbox/graphql" "github.com/stashapp/stash/pkg/sliceutil/stringslice" - "github.com/stashapp/stash/pkg/studio" - "github.com/stashapp/stash/pkg/tag" "github.com/stashapp/stash/pkg/txn" "github.com/stashapp/stash/pkg/utils" ) type SceneReader interface { - Find(ctx context.Context, id int) (*models.Scene, error) + models.SceneGetter models.StashIDLoader models.VideoFileLoader } type PerformerReader interface { + models.PerformerGetter match.PerformerFinder - Find(ctx context.Context, id int) (*models.Performer, error) - FindBySceneID(ctx context.Context, sceneID int) ([]*models.Performer, error) models.AliasLoader models.StashIDLoader + FindBySceneID(ctx context.Context, sceneID int) ([]*models.Performer, error) GetImage(ctx context.Context, performerID int) ([]byte, error) } type StudioReader interface { + models.StudioGetter match.StudioFinder - studio.Finder models.StashIDLoader } + type TagFinder 
interface { - tag.Queryer + models.TagQueryer FindBySceneID(ctx context.Context, sceneID int) ([]*models.Tag, error) } @@ -151,7 +149,7 @@ func (c Client) FindStashBoxScenesByFingerprints(ctx context.Context, ids []int) var sceneFPs []*graphql.FingerprintQueryInput for _, f := range scene.Files.List() { - checksum := f.Fingerprints.GetString(file.FingerprintTypeMD5) + checksum := f.Fingerprints.GetString(models.FingerprintTypeMD5) if checksum != "" { sceneFPs = append(sceneFPs, &graphql.FingerprintQueryInput{ Hash: checksum, @@ -159,7 +157,7 @@ func (c Client) FindStashBoxScenesByFingerprints(ctx context.Context, ids []int) }) } - oshash := f.Fingerprints.GetString(file.FingerprintTypeOshash) + oshash := f.Fingerprints.GetString(models.FingerprintTypeOshash) if oshash != "" { sceneFPs = append(sceneFPs, &graphql.FingerprintQueryInput{ Hash: oshash, @@ -167,7 +165,7 @@ func (c Client) FindStashBoxScenesByFingerprints(ctx context.Context, ids []int) }) } - phash := f.Fingerprints.GetInt64(file.FingerprintTypePhash) + phash := f.Fingerprints.GetInt64(models.FingerprintTypePhash) if phash != 0 { phashStr := utils.PhashToString(phash) sceneFPs = append(sceneFPs, &graphql.FingerprintQueryInput{ @@ -279,7 +277,7 @@ func (c Client) SubmitStashBoxFingerprints(ctx context.Context, sceneIDs []strin duration := f.Duration if duration != 0 { - if checksum := f.Fingerprints.GetString(file.FingerprintTypeMD5); checksum != "" { + if checksum := f.Fingerprints.GetString(models.FingerprintTypeMD5); checksum != "" { fingerprint := graphql.FingerprintInput{ Hash: checksum, Algorithm: graphql.FingerprintAlgorithmMd5, @@ -291,7 +289,7 @@ func (c Client) SubmitStashBoxFingerprints(ctx context.Context, sceneIDs []strin }) } - if oshash := f.Fingerprints.GetString(file.FingerprintTypeOshash); oshash != "" { + if oshash := f.Fingerprints.GetString(models.FingerprintTypeOshash); oshash != "" { fingerprint := graphql.FingerprintInput{ Hash: oshash, Algorithm: 
graphql.FingerprintAlgorithmOshash, @@ -303,7 +301,7 @@ func (c Client) SubmitStashBoxFingerprints(ctx context.Context, sceneIDs []strin }) } - if phash := f.Fingerprints.GetInt64(file.FingerprintTypePhash); phash != 0 { + if phash := f.Fingerprints.GetInt64(models.FingerprintTypePhash); phash != 0 { fingerprint := graphql.FingerprintInput{ Hash: utils.PhashToString(phash), Algorithm: graphql.FingerprintAlgorithmPhash, @@ -979,7 +977,7 @@ func (c Client) SubmitSceneDraft(ctx context.Context, scene *models.Scene, endpo duration := f.Duration if duration != 0 { - if oshash := f.Fingerprints.GetString(file.FingerprintTypeOshash); oshash != "" { + if oshash := f.Fingerprints.GetString(models.FingerprintTypeOshash); oshash != "" { fingerprint := graphql.FingerprintInput{ Hash: oshash, Algorithm: graphql.FingerprintAlgorithmOshash, @@ -988,7 +986,7 @@ func (c Client) SubmitSceneDraft(ctx context.Context, scene *models.Scene, endpo fingerprints = appendFingerprintUnique(fingerprints, &fingerprint) } - if checksum := f.Fingerprints.GetString(file.FingerprintTypeMD5); checksum != "" { + if checksum := f.Fingerprints.GetString(models.FingerprintTypeMD5); checksum != "" { fingerprint := graphql.FingerprintInput{ Hash: checksum, Algorithm: graphql.FingerprintAlgorithmMd5, @@ -997,7 +995,7 @@ func (c Client) SubmitSceneDraft(ctx context.Context, scene *models.Scene, endpo fingerprints = appendFingerprintUnique(fingerprints, &fingerprint) } - if phash := f.Fingerprints.GetInt64(file.FingerprintTypePhash); phash != 0 { + if phash := f.Fingerprints.GetInt64(models.FingerprintTypePhash); phash != 0 { fingerprint := graphql.FingerprintInput{ Hash: utils.PhashToString(phash), Algorithm: graphql.FingerprintAlgorithmPhash, diff --git a/pkg/sqlite/database.go b/pkg/sqlite/database.go index 40a2555fd68..cd87a887c25 100644 --- a/pkg/sqlite/database.go +++ b/pkg/sqlite/database.go @@ -33,7 +33,7 @@ const ( dbConnTimeout = 30 ) -var appSchemaVersion uint = 48 +var appSchemaVersion uint = 49 
//go:embed migrations/*.sql var migrationsBox embed.FS @@ -74,10 +74,10 @@ type Database struct { Scene *SceneStore SceneMarker *SceneMarkerStore Performer *PerformerStore + SavedFilter *SavedFilterStore Studio *StudioStore Tag *TagStore Movie *MovieStore - SavedFilter *SavedFilterStore db *sqlx.DB dbPath string diff --git a/pkg/sqlite/file.go b/pkg/sqlite/file.go index 760a7746558..2113aad13fd 100644 --- a/pkg/sqlite/file.go +++ b/pkg/sqlite/file.go @@ -13,7 +13,6 @@ import ( "github.com/doug-martin/goqu/v9" "github.com/doug-martin/goqu/v9/exp" "github.com/jmoiron/sqlx" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "gopkg.in/guregu/null.v4" ) @@ -31,17 +30,17 @@ const ( ) type basicFileRow struct { - ID file.ID `db:"id" goqu:"skipinsert"` - Basename string `db:"basename"` - ZipFileID null.Int `db:"zip_file_id"` - ParentFolderID file.FolderID `db:"parent_folder_id"` - Size int64 `db:"size"` - ModTime Timestamp `db:"mod_time"` - CreatedAt Timestamp `db:"created_at"` - UpdatedAt Timestamp `db:"updated_at"` + ID models.FileID `db:"id" goqu:"skipinsert"` + Basename string `db:"basename"` + ZipFileID null.Int `db:"zip_file_id"` + ParentFolderID models.FolderID `db:"parent_folder_id"` + Size int64 `db:"size"` + ModTime Timestamp `db:"mod_time"` + CreatedAt Timestamp `db:"created_at"` + UpdatedAt Timestamp `db:"updated_at"` } -func (r *basicFileRow) fromBasicFile(o file.BaseFile) { +func (r *basicFileRow) fromBasicFile(o models.BaseFile) { r.ID = o.ID r.Basename = o.Basename r.ZipFileID = nullIntFromFileIDPtr(o.ZipFileID) @@ -53,20 +52,20 @@ func (r *basicFileRow) fromBasicFile(o file.BaseFile) { } type videoFileRow struct { - FileID file.ID `db:"file_id"` - Format string `db:"format"` - Width int `db:"width"` - Height int `db:"height"` - Duration float64 `db:"duration"` - VideoCodec string `db:"video_codec"` - AudioCodec string `db:"audio_codec"` - FrameRate float64 `db:"frame_rate"` - BitRate int64 `db:"bit_rate"` - Interactive bool 
`db:"interactive"` - InteractiveSpeed null.Int `db:"interactive_speed"` -} - -func (f *videoFileRow) fromVideoFile(ff file.VideoFile) { + FileID models.FileID `db:"file_id"` + Format string `db:"format"` + Width int `db:"width"` + Height int `db:"height"` + Duration float64 `db:"duration"` + VideoCodec string `db:"video_codec"` + AudioCodec string `db:"audio_codec"` + FrameRate float64 `db:"frame_rate"` + BitRate int64 `db:"bit_rate"` + Interactive bool `db:"interactive"` + InteractiveSpeed null.Int `db:"interactive_speed"` +} + +func (f *videoFileRow) fromVideoFile(ff models.VideoFile) { f.FileID = ff.ID f.Format = ff.Format f.Width = ff.Width @@ -81,13 +80,13 @@ func (f *videoFileRow) fromVideoFile(ff file.VideoFile) { } type imageFileRow struct { - FileID file.ID `db:"file_id"` - Format string `db:"format"` - Width int `db:"width"` - Height int `db:"height"` + FileID models.FileID `db:"file_id"` + Format string `db:"format"` + Width int `db:"width"` + Height int `db:"height"` } -func (f *imageFileRow) fromImageFile(ff file.ImageFile) { +func (f *imageFileRow) fromImageFile(ff models.ImageFile) { f.FileID = ff.ID f.Format = ff.Format f.Width = ff.Width @@ -110,8 +109,8 @@ type videoFileQueryRow struct { InteractiveSpeed null.Int `db:"interactive_speed"` } -func (f *videoFileQueryRow) resolve() *file.VideoFile { - return &file.VideoFile{ +func (f *videoFileQueryRow) resolve() *models.VideoFile { + return &models.VideoFile{ Format: f.Format.String, Width: int(f.Width.Int64), Height: int(f.Height.Int64), @@ -159,8 +158,8 @@ func (imageFileQueryRow) columns(table *table) []interface{} { } } -func (f *imageFileQueryRow) resolve() *file.ImageFile { - return &file.ImageFile{ +func (f *imageFileQueryRow) resolve() *models.ImageFile { + return &models.ImageFile{ Format: f.Format.String, Width: int(f.Width.Int64), Height: int(f.Height.Int64), @@ -186,15 +185,15 @@ type fileQueryRow struct { imageFileQueryRow } -func (r *fileQueryRow) resolve() file.File { - basic := 
&file.BaseFile{ - ID: file.ID(r.FileID.Int64), - DirEntry: file.DirEntry{ +func (r *fileQueryRow) resolve() models.File { + basic := &models.BaseFile{ + ID: models.FileID(r.FileID.Int64), + DirEntry: models.DirEntry{ ZipFileID: nullIntFileIDPtr(r.ZipFileID), ModTime: r.ModTime.Timestamp, }, Path: filepath.Join(r.FolderPath.String, r.Basename.String), - ParentFolderID: file.FolderID(r.ParentFolderID.Int64), + ParentFolderID: models.FolderID(r.ParentFolderID.Int64), Basename: r.Basename.String, Size: r.Size.Int64, CreatedAt: r.CreatedAt.Timestamp, @@ -202,14 +201,14 @@ func (r *fileQueryRow) resolve() file.File { } if basic.ZipFileID != nil && r.ZipFolderPath.Valid && r.ZipBasename.Valid { - basic.ZipFile = &file.BaseFile{ + basic.ZipFile = &models.BaseFile{ ID: *basic.ZipFileID, Path: filepath.Join(r.ZipFolderPath.String, r.ZipBasename.String), Basename: r.ZipBasename.String, } } - var ret file.File = basic + var ret models.File = basic if r.videoFileQueryRow.Format.Valid { vf := r.videoFileQueryRow.resolve() @@ -228,7 +227,7 @@ func (r *fileQueryRow) resolve() file.File { return ret } -func appendFingerprintsUnique(vs []file.Fingerprint, v ...file.Fingerprint) []file.Fingerprint { +func appendFingerprintsUnique(vs []models.Fingerprint, v ...models.Fingerprint) []models.Fingerprint { for _, vv := range v { found := false for _, vsv := range vs { @@ -245,7 +244,7 @@ func appendFingerprintsUnique(vs []file.Fingerprint, v ...file.Fingerprint) []fi return vs } -func (r *fileQueryRow) appendRelationships(i *file.BaseFile) { +func (r *fileQueryRow) appendRelationships(i *models.BaseFile) { if r.fingerprintQueryRow.valid() { i.Fingerprints = appendFingerprintsUnique(i.Fingerprints, r.fingerprintQueryRow.resolve()) } @@ -253,16 +252,16 @@ func (r *fileQueryRow) appendRelationships(i *file.BaseFile) { type fileQueryRows []fileQueryRow -func (r fileQueryRows) resolve() []file.File { - var ret []file.File - var last file.File - var lastID file.ID +func (r fileQueryRows) 
resolve() []models.File { + var ret []models.File + var last models.File + var lastID models.FileID for _, row := range r { - if last == nil || lastID != file.ID(row.FileID.Int64) { + if last == nil || lastID != models.FileID(row.FileID.Int64) { f := row.resolve() last = f - lastID = file.ID(row.FileID.Int64) + lastID = models.FileID(row.FileID.Int64) ret = append(ret, last) continue } @@ -295,7 +294,7 @@ func (qb *FileStore) table() exp.IdentifierExpression { return qb.tableMgr.table } -func (qb *FileStore) Create(ctx context.Context, f file.File) error { +func (qb *FileStore) Create(ctx context.Context, f models.File) error { var r basicFileRow r.fromBasicFile(*f.Base()) @@ -304,15 +303,15 @@ func (qb *FileStore) Create(ctx context.Context, f file.File) error { return err } - fileID := file.ID(id) + fileID := models.FileID(id) // create extended stuff here switch ef := f.(type) { - case *file.VideoFile: + case *models.VideoFile: if err := qb.createVideoFile(ctx, fileID, *ef); err != nil { return err } - case *file.ImageFile: + case *models.ImageFile: if err := qb.createImageFile(ctx, fileID, *ef); err != nil { return err } @@ -333,7 +332,7 @@ func (qb *FileStore) Create(ctx context.Context, f file.File) error { return nil } -func (qb *FileStore) Update(ctx context.Context, f file.File) error { +func (qb *FileStore) Update(ctx context.Context, f models.File) error { var r basicFileRow r.fromBasicFile(*f.Base()) @@ -345,11 +344,11 @@ func (qb *FileStore) Update(ctx context.Context, f file.File) error { // create extended stuff here switch ef := f.(type) { - case *file.VideoFile: + case *models.VideoFile: if err := qb.updateOrCreateVideoFile(ctx, id, *ef); err != nil { return err } - case *file.ImageFile: + case *models.ImageFile: if err := qb.updateOrCreateImageFile(ctx, id, *ef); err != nil { return err } @@ -362,11 +361,11 @@ func (qb *FileStore) Update(ctx context.Context, f file.File) error { return nil } -func (qb *FileStore) Destroy(ctx context.Context, id 
file.ID) error { +func (qb *FileStore) Destroy(ctx context.Context, id models.FileID) error { return qb.tableMgr.destroyExisting(ctx, []int{int(id)}) } -func (qb *FileStore) createVideoFile(ctx context.Context, id file.ID, f file.VideoFile) error { +func (qb *FileStore) createVideoFile(ctx context.Context, id models.FileID, f models.VideoFile) error { var r videoFileRow r.fromVideoFile(f) r.FileID = id @@ -377,7 +376,7 @@ func (qb *FileStore) createVideoFile(ctx context.Context, id file.ID, f file.Vid return nil } -func (qb *FileStore) updateOrCreateVideoFile(ctx context.Context, id file.ID, f file.VideoFile) error { +func (qb *FileStore) updateOrCreateVideoFile(ctx context.Context, id models.FileID, f models.VideoFile) error { exists, err := videoFileTableMgr.idExists(ctx, id) if err != nil { return err @@ -397,7 +396,7 @@ func (qb *FileStore) updateOrCreateVideoFile(ctx context.Context, id file.ID, f return nil } -func (qb *FileStore) createImageFile(ctx context.Context, id file.ID, f file.ImageFile) error { +func (qb *FileStore) createImageFile(ctx context.Context, id models.FileID, f models.ImageFile) error { var r imageFileRow r.fromImageFile(f) r.FileID = id @@ -408,7 +407,7 @@ func (qb *FileStore) createImageFile(ctx context.Context, id file.ID, f file.Ima return nil } -func (qb *FileStore) updateOrCreateImageFile(ctx context.Context, id file.ID, f file.ImageFile) error { +func (qb *FileStore) updateOrCreateImageFile(ctx context.Context, id models.FileID, f models.ImageFile) error { exists, err := imageFileTableMgr.idExists(ctx, id) if err != nil { return err @@ -515,7 +514,7 @@ func (qb *FileStore) countDataset() *goqu.SelectDataset { ) } -func (qb *FileStore) get(ctx context.Context, q *goqu.SelectDataset) (file.File, error) { +func (qb *FileStore) get(ctx context.Context, q *goqu.SelectDataset) (models.File, error) { ret, err := qb.getMany(ctx, q) if err != nil { return nil, err @@ -528,7 +527,7 @@ func (qb *FileStore) get(ctx context.Context, q 
*goqu.SelectDataset) (file.File, return ret[0], nil } -func (qb *FileStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]file.File, error) { +func (qb *FileStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]models.File, error) { const single = false var rows fileQueryRows if err := queryFunc(ctx, q, single, func(r *sqlx.Rows) error { @@ -546,8 +545,8 @@ func (qb *FileStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]file return rows.resolve(), nil } -func (qb *FileStore) Find(ctx context.Context, ids ...file.ID) ([]file.File, error) { - var files []file.File +func (qb *FileStore) Find(ctx context.Context, ids ...models.FileID) ([]models.File, error) { + var files []models.File for _, id := range ids { file, err := qb.find(ctx, id) if err != nil { @@ -564,7 +563,7 @@ func (qb *FileStore) Find(ctx context.Context, ids ...file.ID) ([]file.File, err return files, nil } -func (qb *FileStore) find(ctx context.Context, id file.ID) (file.File, error) { +func (qb *FileStore) find(ctx context.Context, id models.FileID) (models.File, error) { q := qb.selectDataset().Where(qb.tableMgr.byID(id)) ret, err := qb.get(ctx, q) @@ -576,7 +575,7 @@ func (qb *FileStore) find(ctx context.Context, id file.ID) (file.File, error) { } // FindByPath returns the first file that matches the given path. Wildcard characters are supported. -func (qb *FileStore) FindByPath(ctx context.Context, p string) (file.File, error) { +func (qb *FileStore) FindByPath(ctx context.Context, p string) (models.File, error) { ret, err := qb.FindAllByPath(ctx, p) @@ -593,7 +592,7 @@ func (qb *FileStore) FindByPath(ctx context.Context, p string) (file.File, error // FindAllByPath returns all the files that match the given path. // Wildcard characters are supported. 
-func (qb *FileStore) FindAllByPath(ctx context.Context, p string) ([]file.File, error) { +func (qb *FileStore) FindAllByPath(ctx context.Context, p string) ([]models.File, error) { // separate basename from path basename := filepath.Base(p) dirName := filepath.Dir(p) @@ -646,7 +645,7 @@ func (qb *FileStore) allInPaths(q *goqu.SelectDataset, p []string) *goqu.SelectD // FindAllByPaths returns the all files that are within any of the given paths. // Returns all if limit is < 0. // Returns all files if p is empty. -func (qb *FileStore) FindAllInPaths(ctx context.Context, p []string, limit, offset int) ([]file.File, error) { +func (qb *FileStore) FindAllInPaths(ctx context.Context, p []string, limit, offset int) ([]models.File, error) { table := qb.table() folderTable := folderTableMgr.table @@ -680,7 +679,7 @@ func (qb *FileStore) CountAllInPaths(ctx context.Context, p []string) (int, erro return count(ctx, q) } -func (qb *FileStore) findBySubquery(ctx context.Context, sq *goqu.SelectDataset) ([]file.File, error) { +func (qb *FileStore) findBySubquery(ctx context.Context, sq *goqu.SelectDataset) ([]models.File, error) { table := qb.table() q := qb.selectDataset().Prepared(true).Where( @@ -692,7 +691,7 @@ func (qb *FileStore) findBySubquery(ctx context.Context, sq *goqu.SelectDataset) return qb.getMany(ctx, q) } -func (qb *FileStore) FindByFingerprint(ctx context.Context, fp file.Fingerprint) ([]file.File, error) { +func (qb *FileStore) FindByFingerprint(ctx context.Context, fp models.Fingerprint) ([]models.File, error) { fingerprintTable := fingerprintTableMgr.table fingerprints := fingerprintTable.As("fp") @@ -705,7 +704,7 @@ func (qb *FileStore) FindByFingerprint(ctx context.Context, fp file.Fingerprint) return qb.findBySubquery(ctx, sq) } -func (qb *FileStore) FindByZipFileID(ctx context.Context, zipFileID file.ID) ([]file.File, error) { +func (qb *FileStore) FindByZipFileID(ctx context.Context, zipFileID models.FileID) ([]models.File, error) { table := qb.table() 
q := qb.selectDataset().Prepared(true).Where( @@ -716,7 +715,7 @@ func (qb *FileStore) FindByZipFileID(ctx context.Context, zipFileID file.ID) ([] } // FindByFileInfo finds files that match the base name, size, and mod time of the given file. -func (qb *FileStore) FindByFileInfo(ctx context.Context, info fs.FileInfo, size int64) ([]file.File, error) { +func (qb *FileStore) FindByFileInfo(ctx context.Context, info fs.FileInfo, size int64) ([]models.File, error) { table := qb.table() modTime := info.ModTime().Format(time.RFC3339) @@ -730,7 +729,7 @@ func (qb *FileStore) FindByFileInfo(ctx context.Context, info fs.FileInfo, size return qb.getMany(ctx, q) } -func (qb *FileStore) CountByFolderID(ctx context.Context, folderID file.FolderID) (int, error) { +func (qb *FileStore) CountByFolderID(ctx context.Context, folderID models.FolderID) (int, error) { table := qb.table() q := qb.countDataset().Prepared(true).Where( @@ -740,7 +739,7 @@ func (qb *FileStore) CountByFolderID(ctx context.Context, folderID file.FolderID return count(ctx, q) } -func (qb *FileStore) IsPrimary(ctx context.Context, fileID file.ID) (bool, error) { +func (qb *FileStore) IsPrimary(ctx context.Context, fileID models.FileID) (bool, error) { joinTables := []exp.IdentifierExpression{ scenesFilesJoinTable, galleriesFilesJoinTable, @@ -867,9 +866,9 @@ func (qb *FileStore) Query(ctx context.Context, options models.FileQueryOptions) return nil, fmt.Errorf("error finding IDs: %w", err) } - result.IDs = make([]file.ID, len(idsResult)) + result.IDs = make([]models.FileID, len(idsResult)) for i, id := range idsResult { - result.IDs[i] = file.ID(id) + result.IDs[i] = models.FileID(id) } return result, nil @@ -929,10 +928,10 @@ func (qb *FileStore) captionRepository() *captionRepository { } } -func (qb *FileStore) GetCaptions(ctx context.Context, fileID file.ID) ([]*models.VideoCaption, error) { +func (qb *FileStore) GetCaptions(ctx context.Context, fileID models.FileID) ([]*models.VideoCaption, error) { return 
qb.captionRepository().get(ctx, fileID) } -func (qb *FileStore) UpdateCaptions(ctx context.Context, fileID file.ID, captions []*models.VideoCaption) error { +func (qb *FileStore) UpdateCaptions(ctx context.Context, fileID models.FileID, captions []*models.VideoCaption) error { return qb.captionRepository().replace(ctx, fileID, captions) } diff --git a/pkg/sqlite/file_test.go b/pkg/sqlite/file_test.go index 2bcbe42e956..766ffcc70b7 100644 --- a/pkg/sqlite/file_test.go +++ b/pkg/sqlite/file_test.go @@ -9,7 +9,7 @@ import ( "testing" "time" - "github.com/stashapp/stash/pkg/file" + "github.com/stashapp/stash/pkg/models" "github.com/stretchr/testify/assert" ) @@ -17,10 +17,10 @@ func getFilePath(folderIdx int, basename string) string { return filepath.Join(folderPaths[folderIdx], basename) } -func makeZipFileWithID(index int) file.File { +func makeZipFileWithID(index int) models.File { f := makeFile(index) - return &file.BaseFile{ + return &models.BaseFile{ ID: fileIDs[index], Basename: f.Base().Basename, Path: getFilePath(fileFolders[index], getFileBaseName(index)), @@ -49,13 +49,13 @@ func Test_fileFileStore_Create(t *testing.T) { tests := []struct { name string - newObject file.File + newObject models.File wantErr bool }{ { "full", - &file.BaseFile{ - DirEntry: file.DirEntry{ + &models.BaseFile{ + DirEntry: models.DirEntry{ ZipFileID: &fileIDs[fileIdxZip], ZipFile: makeZipFileWithID(fileIdxZip), ModTime: fileModTime, @@ -64,7 +64,7 @@ func Test_fileFileStore_Create(t *testing.T) { ParentFolderID: folderIDs[folderIdxWithFiles], Basename: basename, Size: size, - Fingerprints: []file.Fingerprint{ + Fingerprints: []models.Fingerprint{ { Type: fingerprintType, Fingerprint: fingerprintValue, @@ -77,9 +77,9 @@ func Test_fileFileStore_Create(t *testing.T) { }, { "video file", - &file.VideoFile{ - BaseFile: &file.BaseFile{ - DirEntry: file.DirEntry{ + &models.VideoFile{ + BaseFile: &models.BaseFile{ + DirEntry: models.DirEntry{ ZipFileID: &fileIDs[fileIdxZip], ZipFile: 
makeZipFileWithID(fileIdxZip), ModTime: fileModTime, @@ -88,7 +88,7 @@ func Test_fileFileStore_Create(t *testing.T) { ParentFolderID: folderIDs[folderIdxWithFiles], Basename: basename, Size: size, - Fingerprints: []file.Fingerprint{ + Fingerprints: []models.Fingerprint{ { Type: fingerprintType, Fingerprint: fingerprintValue, @@ -110,9 +110,9 @@ func Test_fileFileStore_Create(t *testing.T) { }, { "image file", - &file.ImageFile{ - BaseFile: &file.BaseFile{ - DirEntry: file.DirEntry{ + &models.ImageFile{ + BaseFile: &models.BaseFile{ + DirEntry: models.DirEntry{ ZipFileID: &fileIDs[fileIdxZip], ZipFile: makeZipFileWithID(fileIdxZip), ModTime: fileModTime, @@ -121,7 +121,7 @@ func Test_fileFileStore_Create(t *testing.T) { ParentFolderID: folderIDs[folderIdxWithFiles], Basename: basename, Size: size, - Fingerprints: []file.Fingerprint{ + Fingerprints: []models.Fingerprint{ { Type: fingerprintType, Fingerprint: fingerprintValue, @@ -138,15 +138,15 @@ func Test_fileFileStore_Create(t *testing.T) { }, { "duplicate path", - &file.BaseFile{ - DirEntry: file.DirEntry{ + &models.BaseFile{ + DirEntry: models.DirEntry{ ModTime: fileModTime, }, Path: getFilePath(folderIdxWithFiles, getFileBaseName(fileIdxZip)), ParentFolderID: folderIDs[folderIdxWithFiles], Basename: getFileBaseName(fileIdxZip), Size: size, - Fingerprints: []file.Fingerprint{ + Fingerprints: []models.Fingerprint{ { Type: fingerprintType, Fingerprint: fingerprintValue, @@ -159,22 +159,22 @@ func Test_fileFileStore_Create(t *testing.T) { }, { "empty basename", - &file.BaseFile{ + &models.BaseFile{ ParentFolderID: folderIDs[folderIdxWithFiles], }, true, }, { "missing folder id", - &file.BaseFile{ + &models.BaseFile{ Basename: basename, }, true, }, { "invalid folder id", - &file.BaseFile{ - DirEntry: file.DirEntry{}, + &models.BaseFile{ + DirEntry: models.DirEntry{}, ParentFolderID: invalidFolderID, Basename: basename, }, @@ -182,8 +182,8 @@ func Test_fileFileStore_Create(t *testing.T) { }, { "invalid zip file id", 
- &file.BaseFile{ - DirEntry: file.DirEntry{ + &models.BaseFile{ + DirEntry: models.DirEntry{ ZipFileID: &invalidFileID, }, Basename: basename, @@ -210,15 +210,15 @@ func Test_fileFileStore_Create(t *testing.T) { assert.NotZero(s.Base().ID) - var copy file.File + var copy models.File switch t := s.(type) { - case *file.BaseFile: + case *models.BaseFile: v := *t copy = &v - case *file.VideoFile: + case *models.VideoFile: v := *t copy = &v - case *file.ImageFile: + case *models.ImageFile: v := *t copy = &v } @@ -266,14 +266,14 @@ func Test_fileStore_Update(t *testing.T) { tests := []struct { name string - updatedObject file.File + updatedObject models.File wantErr bool }{ { "full", - &file.BaseFile{ + &models.BaseFile{ ID: fileIDs[fileIdxInZip], - DirEntry: file.DirEntry{ + DirEntry: models.DirEntry{ ZipFileID: &fileIDs[fileIdxZip], ZipFile: makeZipFileWithID(fileIdxZip), ModTime: fileModTime, @@ -282,7 +282,7 @@ func Test_fileStore_Update(t *testing.T) { ParentFolderID: folderIDs[folderIdxWithFiles], Basename: basename, Size: size, - Fingerprints: []file.Fingerprint{ + Fingerprints: []models.Fingerprint{ { Type: fingerprintType, Fingerprint: fingerprintValue, @@ -295,10 +295,10 @@ func Test_fileStore_Update(t *testing.T) { }, { "video file", - &file.VideoFile{ - BaseFile: &file.BaseFile{ + &models.VideoFile{ + BaseFile: &models.BaseFile{ ID: fileIDs[fileIdxStartVideoFiles], - DirEntry: file.DirEntry{ + DirEntry: models.DirEntry{ ZipFileID: &fileIDs[fileIdxZip], ZipFile: makeZipFileWithID(fileIdxZip), ModTime: fileModTime, @@ -307,7 +307,7 @@ func Test_fileStore_Update(t *testing.T) { ParentFolderID: folderIDs[folderIdxWithFiles], Basename: basename, Size: size, - Fingerprints: []file.Fingerprint{ + Fingerprints: []models.Fingerprint{ { Type: fingerprintType, Fingerprint: fingerprintValue, @@ -329,10 +329,10 @@ func Test_fileStore_Update(t *testing.T) { }, { "image file", - &file.ImageFile{ - BaseFile: &file.BaseFile{ + &models.ImageFile{ + BaseFile: 
&models.BaseFile{ ID: fileIDs[fileIdxStartImageFiles], - DirEntry: file.DirEntry{ + DirEntry: models.DirEntry{ ZipFileID: &fileIDs[fileIdxZip], ZipFile: makeZipFileWithID(fileIdxZip), ModTime: fileModTime, @@ -341,7 +341,7 @@ func Test_fileStore_Update(t *testing.T) { ParentFolderID: folderIDs[folderIdxWithFiles], Basename: basename, Size: size, - Fingerprints: []file.Fingerprint{ + Fingerprints: []models.Fingerprint{ { Type: fingerprintType, Fingerprint: fingerprintValue, @@ -358,16 +358,16 @@ func Test_fileStore_Update(t *testing.T) { }, { "duplicate path", - &file.BaseFile{ + &models.BaseFile{ ID: fileIDs[fileIdxInZip], - DirEntry: file.DirEntry{ + DirEntry: models.DirEntry{ ModTime: fileModTime, }, Path: getFilePath(folderIdxWithFiles, getFileBaseName(fileIdxZip)), ParentFolderID: folderIDs[folderIdxWithFiles], Basename: getFileBaseName(fileIdxZip), Size: size, - Fingerprints: []file.Fingerprint{ + Fingerprints: []models.Fingerprint{ { Type: fingerprintType, Fingerprint: fingerprintValue, @@ -380,7 +380,7 @@ func Test_fileStore_Update(t *testing.T) { }, { "clear zip", - &file.BaseFile{ + &models.BaseFile{ ID: fileIDs[fileIdxInZip], Path: getFilePath(folderIdxWithFiles, getFileBaseName(fileIdxZip)+".renamed"), Basename: getFileBaseName(fileIdxZip) + ".renamed", @@ -390,7 +390,7 @@ func Test_fileStore_Update(t *testing.T) { }, { "clear folder", - &file.BaseFile{ + &models.BaseFile{ ID: fileIDs[fileIdxZip], Path: basename, }, @@ -398,7 +398,7 @@ func Test_fileStore_Update(t *testing.T) { }, { "invalid parent folder id", - &file.BaseFile{ + &models.BaseFile{ ID: fileIDs[fileIdxZip], Path: basename, ParentFolderID: invalidFolderID, @@ -407,10 +407,10 @@ func Test_fileStore_Update(t *testing.T) { }, { "invalid zip file id", - &file.BaseFile{ + &models.BaseFile{ ID: fileIDs[fileIdxZip], Path: basename, - DirEntry: file.DirEntry{ + DirEntry: models.DirEntry{ ZipFileID: &invalidFileID, }, ParentFolderID: folderIDs[folderIdxWithFiles], @@ -450,7 +450,7 @@ func 
Test_fileStore_Update(t *testing.T) { } } -func makeFileWithID(index int) file.File { +func makeFileWithID(index int) models.File { ret := makeFile(index) ret.Base().Path = getFilePath(fileFolders[index], getFileBaseName(index)) ret.Base().ID = fileIDs[index] @@ -461,8 +461,8 @@ func makeFileWithID(index int) file.File { func Test_fileStore_Find(t *testing.T) { tests := []struct { name string - id file.ID - want file.File + id models.FileID + want models.File wantErr bool }{ { @@ -473,7 +473,7 @@ func Test_fileStore_Find(t *testing.T) { }, { "invalid", - file.ID(invalidID), + models.FileID(invalidID), nil, true, }, @@ -529,7 +529,7 @@ func Test_FileStore_FindByPath(t *testing.T) { tests := []struct { name string path string - want file.File + want models.File wantErr bool }{ { @@ -565,31 +565,31 @@ func Test_FileStore_FindByPath(t *testing.T) { func TestFileStore_FindByFingerprint(t *testing.T) { tests := []struct { name string - fp file.Fingerprint - want []file.File + fp models.Fingerprint + want []models.File wantErr bool }{ { "by MD5", - file.Fingerprint{ + models.Fingerprint{ Type: "MD5", Fingerprint: getPrefixedStringValue("file", fileIdxZip, "md5"), }, - []file.File{makeFileWithID(fileIdxZip)}, + []models.File{makeFileWithID(fileIdxZip)}, false, }, { "by OSHASH", - file.Fingerprint{ + models.Fingerprint{ Type: "OSHASH", Fingerprint: getPrefixedStringValue("file", fileIdxZip, "oshash"), }, - []file.File{makeFileWithID(fileIdxZip)}, + []models.File{makeFileWithID(fileIdxZip)}, false, }, { "non-existing", - file.Fingerprint{ + models.Fingerprint{ Type: "OSHASH", Fingerprint: "foo", }, @@ -617,7 +617,7 @@ func TestFileStore_FindByFingerprint(t *testing.T) { func TestFileStore_IsPrimary(t *testing.T) { tests := []struct { name string - fileID file.ID + fileID models.FileID want bool }{ { diff --git a/pkg/sqlite/fingerprint.go b/pkg/sqlite/fingerprint.go index 0f7c36d1274..49bae54caf9 100644 --- a/pkg/sqlite/fingerprint.go +++ b/pkg/sqlite/fingerprint.go @@ -6,7 
+6,7 @@ import ( "github.com/doug-martin/goqu/v9" "github.com/doug-martin/goqu/v9/exp" - "github.com/stashapp/stash/pkg/file" + "github.com/stashapp/stash/pkg/models" "gopkg.in/guregu/null.v4" ) @@ -23,8 +23,8 @@ func (r fingerprintQueryRow) valid() bool { return r.Type.Valid } -func (r *fingerprintQueryRow) resolve() file.Fingerprint { - return file.Fingerprint{ +func (r *fingerprintQueryRow) resolve() models.Fingerprint { + return models.Fingerprint{ Type: r.Type.String, Fingerprint: r.Fingerprint, } @@ -45,7 +45,7 @@ var FingerprintReaderWriter = &fingerprintQueryBuilder{ tableMgr: fingerprintTableMgr, } -func (qb *fingerprintQueryBuilder) insert(ctx context.Context, fileID file.ID, f file.Fingerprint) error { +func (qb *fingerprintQueryBuilder) insert(ctx context.Context, fileID models.FileID, f models.Fingerprint) error { table := qb.table() q := dialect.Insert(table).Cols(fileIDColumn, "type", "fingerprint").Vals( goqu.Vals{fileID, f.Type, f.Fingerprint}, @@ -58,7 +58,7 @@ func (qb *fingerprintQueryBuilder) insert(ctx context.Context, fileID file.ID, f return nil } -func (qb *fingerprintQueryBuilder) insertJoins(ctx context.Context, fileID file.ID, f []file.Fingerprint) error { +func (qb *fingerprintQueryBuilder) insertJoins(ctx context.Context, fileID models.FileID, f []models.Fingerprint) error { for _, ff := range f { if err := qb.insert(ctx, fileID, ff); err != nil { return err @@ -68,7 +68,7 @@ func (qb *fingerprintQueryBuilder) insertJoins(ctx context.Context, fileID file. 
return nil } -func (qb *fingerprintQueryBuilder) replaceJoins(ctx context.Context, fileID file.ID, f []file.Fingerprint) error { +func (qb *fingerprintQueryBuilder) replaceJoins(ctx context.Context, fileID models.FileID, f []models.Fingerprint) error { if err := qb.destroy(ctx, []int{int(fileID)}); err != nil { return err } diff --git a/pkg/sqlite/folder.go b/pkg/sqlite/folder.go index ff1e8a2c559..26cbf896252 100644 --- a/pkg/sqlite/folder.go +++ b/pkg/sqlite/folder.go @@ -10,23 +10,23 @@ import ( "github.com/doug-martin/goqu/v9" "github.com/doug-martin/goqu/v9/exp" "github.com/jmoiron/sqlx" - "github.com/stashapp/stash/pkg/file" + "github.com/stashapp/stash/pkg/models" "gopkg.in/guregu/null.v4" ) const folderTable = "folders" type folderRow struct { - ID file.FolderID `db:"id" goqu:"skipinsert"` - Path string `db:"path"` - ZipFileID null.Int `db:"zip_file_id"` - ParentFolderID null.Int `db:"parent_folder_id"` - ModTime Timestamp `db:"mod_time"` - CreatedAt Timestamp `db:"created_at"` - UpdatedAt Timestamp `db:"updated_at"` + ID models.FolderID `db:"id" goqu:"skipinsert"` + Path string `db:"path"` + ZipFileID null.Int `db:"zip_file_id"` + ParentFolderID null.Int `db:"parent_folder_id"` + ModTime Timestamp `db:"mod_time"` + CreatedAt Timestamp `db:"created_at"` + UpdatedAt Timestamp `db:"updated_at"` } -func (r *folderRow) fromFolder(o file.Folder) { +func (r *folderRow) fromFolder(o models.Folder) { r.ID = o.ID r.Path = o.Path r.ZipFileID = nullIntFromFileIDPtr(o.ZipFileID) @@ -43,10 +43,10 @@ type folderQueryRow struct { ZipFolderPath null.String `db:"zip_folder_path"` } -func (r *folderQueryRow) resolve() *file.Folder { - ret := &file.Folder{ +func (r *folderQueryRow) resolve() *models.Folder { + ret := &models.Folder{ ID: r.ID, - DirEntry: file.DirEntry{ + DirEntry: models.DirEntry{ ZipFileID: nullIntFileIDPtr(r.ZipFileID), ModTime: r.ModTime.Timestamp, }, @@ -57,7 +57,7 @@ func (r *folderQueryRow) resolve() *file.Folder { } if ret.ZipFileID != nil && 
r.ZipFolderPath.Valid && r.ZipBasename.Valid { - ret.ZipFile = &file.BaseFile{ + ret.ZipFile = &models.BaseFile{ ID: *ret.ZipFileID, Path: filepath.Join(r.ZipFolderPath.String, r.ZipBasename.String), Basename: r.ZipBasename.String, @@ -69,8 +69,8 @@ func (r *folderQueryRow) resolve() *file.Folder { type folderQueryRows []folderQueryRow -func (r folderQueryRows) resolve() []*file.Folder { - var ret []*file.Folder +func (r folderQueryRows) resolve() []*models.Folder { + var ret []*models.Folder for _, row := range r { f := row.resolve() @@ -97,7 +97,7 @@ func NewFolderStore() *FolderStore { } } -func (qb *FolderStore) Create(ctx context.Context, f *file.Folder) error { +func (qb *FolderStore) Create(ctx context.Context, f *models.Folder) error { var r folderRow r.fromFolder(*f) @@ -107,12 +107,12 @@ func (qb *FolderStore) Create(ctx context.Context, f *file.Folder) error { } // only assign id once we are successful - f.ID = file.FolderID(id) + f.ID = models.FolderID(id) return nil } -func (qb *FolderStore) Update(ctx context.Context, updatedObject *file.Folder) error { +func (qb *FolderStore) Update(ctx context.Context, updatedObject *models.Folder) error { var r folderRow r.fromFolder(*updatedObject) @@ -123,7 +123,7 @@ func (qb *FolderStore) Update(ctx context.Context, updatedObject *file.Folder) e return nil } -func (qb *FolderStore) Destroy(ctx context.Context, id file.FolderID) error { +func (qb *FolderStore) Destroy(ctx context.Context, id models.FolderID) error { return qb.tableMgr.destroyExisting(ctx, []int{int(id)}) } @@ -179,7 +179,7 @@ func (qb *FolderStore) countDataset() *goqu.SelectDataset { ) } -func (qb *FolderStore) get(ctx context.Context, q *goqu.SelectDataset) (*file.Folder, error) { +func (qb *FolderStore) get(ctx context.Context, q *goqu.SelectDataset) (*models.Folder, error) { ret, err := qb.getMany(ctx, q) if err != nil { return nil, err @@ -192,7 +192,7 @@ func (qb *FolderStore) get(ctx context.Context, q *goqu.SelectDataset) (*file.Fo return 
ret[0], nil } -func (qb *FolderStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]*file.Folder, error) { +func (qb *FolderStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]*models.Folder, error) { const single = false var rows folderQueryRows if err := queryFunc(ctx, q, single, func(r *sqlx.Rows) error { @@ -210,7 +210,7 @@ func (qb *FolderStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]*f return rows.resolve(), nil } -func (qb *FolderStore) Find(ctx context.Context, id file.FolderID) (*file.Folder, error) { +func (qb *FolderStore) Find(ctx context.Context, id models.FolderID) (*models.Folder, error) { q := qb.selectDataset().Where(qb.tableMgr.byID(id)) ret, err := qb.get(ctx, q) @@ -221,7 +221,7 @@ func (qb *FolderStore) Find(ctx context.Context, id file.FolderID) (*file.Folder return ret, nil } -func (qb *FolderStore) FindByPath(ctx context.Context, p string) (*file.Folder, error) { +func (qb *FolderStore) FindByPath(ctx context.Context, p string) (*models.Folder, error) { q := qb.selectDataset().Prepared(true).Where(qb.table().Col("path").Eq(p)) ret, err := qb.get(ctx, q) @@ -232,7 +232,7 @@ func (qb *FolderStore) FindByPath(ctx context.Context, p string) (*file.Folder, return ret, nil } -func (qb *FolderStore) FindByParentFolderID(ctx context.Context, parentFolderID file.FolderID) ([]*file.Folder, error) { +func (qb *FolderStore) FindByParentFolderID(ctx context.Context, parentFolderID models.FolderID) ([]*models.Folder, error) { q := qb.selectDataset().Where(qb.table().Col("parent_folder_id").Eq(int(parentFolderID))) ret, err := qb.getMany(ctx, q) @@ -261,7 +261,7 @@ func (qb *FolderStore) allInPaths(q *goqu.SelectDataset, p []string) *goqu.Selec // FindAllInPaths returns the all folders that are or are within any of the given paths. // Returns all if limit is < 0. // Returns all folders if p is empty. 
-func (qb *FolderStore) FindAllInPaths(ctx context.Context, p []string, limit, offset int) ([]*file.Folder, error) { +func (qb *FolderStore) FindAllInPaths(ctx context.Context, p []string, limit, offset int) ([]*models.Folder, error) { q := qb.selectDataset().Prepared(true) q = qb.allInPaths(q, p) @@ -300,7 +300,7 @@ func (qb *FolderStore) CountAllInPaths(ctx context.Context, p []string) (int, er // return qb.getMany(ctx, q) // } -func (qb *FolderStore) FindByZipFileID(ctx context.Context, zipFileID file.ID) ([]*file.Folder, error) { +func (qb *FolderStore) FindByZipFileID(ctx context.Context, zipFileID models.FileID) ([]*models.Folder, error) { table := qb.table() q := qb.selectDataset().Prepared(true).Where( diff --git a/pkg/sqlite/folder_test.go b/pkg/sqlite/folder_test.go index 71e45305a09..1d948d06368 100644 --- a/pkg/sqlite/folder_test.go +++ b/pkg/sqlite/folder_test.go @@ -9,13 +9,13 @@ import ( "testing" "time" - "github.com/stashapp/stash/pkg/file" + "github.com/stashapp/stash/pkg/models" "github.com/stretchr/testify/assert" ) var ( - invalidFolderID = file.FolderID(invalidID) - invalidFileID = file.ID(invalidID) + invalidFolderID = models.FolderID(invalidID) + invalidFileID = models.FileID(invalidID) ) func Test_FolderStore_Create(t *testing.T) { @@ -28,13 +28,13 @@ func Test_FolderStore_Create(t *testing.T) { tests := []struct { name string - newObject file.Folder + newObject models.Folder wantErr bool }{ { "full", - file.Folder{ - DirEntry: file.DirEntry{ + models.Folder{ + DirEntry: models.DirEntry{ ZipFileID: &fileIDs[fileIdxZip], ZipFile: makeZipFileWithID(fileIdxZip), ModTime: fileModTime, @@ -47,7 +47,7 @@ func Test_FolderStore_Create(t *testing.T) { }, { "invalid parent folder id", - file.Folder{ + models.Folder{ Path: path, ParentFolderID: &invalidFolderID, }, @@ -55,8 +55,8 @@ func Test_FolderStore_Create(t *testing.T) { }, { "invalid zip file id", - file.Folder{ - DirEntry: file.DirEntry{ + models.Folder{ + DirEntry: models.DirEntry{ ZipFileID: 
&invalidFileID, }, Path: path, @@ -109,14 +109,14 @@ func Test_FolderStore_Update(t *testing.T) { tests := []struct { name string - updatedObject *file.Folder + updatedObject *models.Folder wantErr bool }{ { "full", - &file.Folder{ + &models.Folder{ ID: folderIDs[folderIdxWithParentFolder], - DirEntry: file.DirEntry{ + DirEntry: models.DirEntry{ ZipFileID: &fileIDs[fileIdxZip], ZipFile: makeZipFileWithID(fileIdxZip), ModTime: fileModTime, @@ -129,7 +129,7 @@ func Test_FolderStore_Update(t *testing.T) { }, { "clear zip", - &file.Folder{ + &models.Folder{ ID: folderIDs[folderIdxInZip], Path: path, }, @@ -137,7 +137,7 @@ func Test_FolderStore_Update(t *testing.T) { }, { "clear folder", - &file.Folder{ + &models.Folder{ ID: folderIDs[folderIdxWithParentFolder], Path: path, }, @@ -145,7 +145,7 @@ func Test_FolderStore_Update(t *testing.T) { }, { "invalid parent folder id", - &file.Folder{ + &models.Folder{ ID: folderIDs[folderIdxWithParentFolder], Path: path, ParentFolderID: &invalidFolderID, @@ -154,9 +154,9 @@ func Test_FolderStore_Update(t *testing.T) { }, { "invalid zip file id", - &file.Folder{ + &models.Folder{ ID: folderIDs[folderIdxWithParentFolder], - DirEntry: file.DirEntry{ + DirEntry: models.DirEntry{ ZipFileID: &invalidFileID, }, Path: path, @@ -192,7 +192,7 @@ func Test_FolderStore_Update(t *testing.T) { } } -func makeFolderWithID(index int) *file.Folder { +func makeFolderWithID(index int) *models.Folder { ret := makeFolder(index) ret.ID = folderIDs[index] @@ -207,7 +207,7 @@ func Test_FolderStore_FindByPath(t *testing.T) { tests := []struct { name string path string - want *file.Folder + want *models.Folder wantErr bool }{ { diff --git a/pkg/sqlite/gallery.go b/pkg/sqlite/gallery.go index b7ece948d74..7bdf98bd31a 100644 --- a/pkg/sqlite/gallery.go +++ b/pkg/sqlite/gallery.go @@ -11,7 +11,6 @@ import ( "github.com/doug-martin/goqu/v9" "github.com/doug-martin/goqu/v9/exp" "github.com/jmoiron/sqlx" - "github.com/stashapp/stash/pkg/file" 
"github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/sliceutil/intslice" "gopkg.in/guregu/null.v4" @@ -163,7 +162,7 @@ func (qb *GalleryStore) selectDataset() *goqu.SelectDataset { ) } -func (qb *GalleryStore) Create(ctx context.Context, newObject *models.Gallery, fileIDs []file.ID) error { +func (qb *GalleryStore) Create(ctx context.Context, newObject *models.Gallery, fileIDs []models.FileID) error { var r galleryRow r.fromGallery(*newObject) @@ -230,7 +229,7 @@ func (qb *GalleryStore) Update(ctx context.Context, updatedObject *models.Galler } if updatedObject.Files.Loaded() { - fileIDs := make([]file.ID, len(updatedObject.Files.List())) + fileIDs := make([]models.FileID, len(updatedObject.Files.List())) for i, f := range updatedObject.Files.List() { fileIDs[i] = f.Base().ID } @@ -287,7 +286,7 @@ func (qb *GalleryStore) Destroy(ctx context.Context, id int) error { return qb.tableMgr.destroyExisting(ctx, []int{id}) } -func (qb *GalleryStore) GetFiles(ctx context.Context, id int) ([]file.File, error) { +func (qb *GalleryStore) GetFiles(ctx context.Context, id int) ([]models.File, error) { fileIDs, err := qb.filesRepository().get(ctx, id) if err != nil { return nil, err @@ -299,13 +298,13 @@ func (qb *GalleryStore) GetFiles(ctx context.Context, id int) ([]file.File, erro return nil, err } - ret := make([]file.File, len(files)) + ret := make([]models.File, len(files)) copy(ret, files) return ret, nil } -func (qb *GalleryStore) GetManyFileIDs(ctx context.Context, ids []int) ([][]file.ID, error) { +func (qb *GalleryStore) GetManyFileIDs(ctx context.Context, ids []int) ([][]models.FileID, error) { const primaryOnly = false return qb.filesRepository().getMany(ctx, ids, primaryOnly) } @@ -412,7 +411,7 @@ func (qb *GalleryStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]* return ret, nil } -func (qb *GalleryStore) FindByFileID(ctx context.Context, fileID file.ID) ([]*models.Gallery, error) { +func (qb *GalleryStore) FindByFileID(ctx 
context.Context, fileID models.FileID) ([]*models.Gallery, error) { sq := dialect.From(galleriesFilesJoinTable).Select(galleriesFilesJoinTable.Col(galleryIDColumn)).Where( galleriesFilesJoinTable.Col(fileIDColumn).Eq(fileID), ) @@ -425,14 +424,14 @@ func (qb *GalleryStore) FindByFileID(ctx context.Context, fileID file.ID) ([]*mo return ret, nil } -func (qb *GalleryStore) CountByFileID(ctx context.Context, fileID file.ID) (int, error) { +func (qb *GalleryStore) CountByFileID(ctx context.Context, fileID models.FileID) (int, error) { joinTable := galleriesFilesJoinTable q := dialect.Select(goqu.COUNT("*")).From(joinTable).Where(joinTable.Col(fileIDColumn).Eq(fileID)) return count(ctx, q) } -func (qb *GalleryStore) FindByFingerprints(ctx context.Context, fp []file.Fingerprint) ([]*models.Gallery, error) { +func (qb *GalleryStore) FindByFingerprints(ctx context.Context, fp []models.Fingerprint) ([]*models.Gallery, error) { fingerprintTable := fingerprintTableMgr.table var ex []exp.Expression @@ -460,20 +459,20 @@ func (qb *GalleryStore) FindByFingerprints(ctx context.Context, fp []file.Finger } func (qb *GalleryStore) FindByChecksum(ctx context.Context, checksum string) ([]*models.Gallery, error) { - return qb.FindByFingerprints(ctx, []file.Fingerprint{ + return qb.FindByFingerprints(ctx, []models.Fingerprint{ { - Type: file.FingerprintTypeMD5, + Type: models.FingerprintTypeMD5, Fingerprint: checksum, }, }) } func (qb *GalleryStore) FindByChecksums(ctx context.Context, checksums []string) ([]*models.Gallery, error) { - fingerprints := make([]file.Fingerprint, len(checksums)) + fingerprints := make([]models.Fingerprint, len(checksums)) for i, c := range checksums { - fingerprints[i] = file.Fingerprint{ - Type: file.FingerprintTypeMD5, + fingerprints[i] = models.Fingerprint{ + Type: models.FingerprintTypeMD5, Fingerprint: c, } } @@ -519,7 +518,7 @@ func (qb *GalleryStore) FindByPath(ctx context.Context, p string) ([]*models.Gal return ret, nil } -func (qb *GalleryStore) 
FindByFolderID(ctx context.Context, folderID file.FolderID) ([]*models.Gallery, error) { +func (qb *GalleryStore) FindByFolderID(ctx context.Context, folderID models.FolderID) ([]*models.Gallery, error) { table := qb.table() sq := dialect.From(table).Select(table.Col(idColumn)).Where( @@ -1118,9 +1117,9 @@ func (qb *GalleryStore) filesRepository() *filesRepository { } } -func (qb *GalleryStore) AddFileID(ctx context.Context, id int, fileID file.ID) error { +func (qb *GalleryStore) AddFileID(ctx context.Context, id int, fileID models.FileID) error { const firstPrimary = false - return galleriesFilesTableMgr.insertJoins(ctx, id, firstPrimary, []file.ID{fileID}) + return galleriesFilesTableMgr.insertJoins(ctx, id, firstPrimary, []models.FileID{fileID}) } func (qb *GalleryStore) performersRepository() *joinRepository { diff --git a/pkg/sqlite/gallery_test.go b/pkg/sqlite/gallery_test.go index d33d5ba2a96..c8dbe02762f 100644 --- a/pkg/sqlite/gallery_test.go +++ b/pkg/sqlite/gallery_test.go @@ -10,7 +10,6 @@ import ( "testing" "time" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stretchr/testify/assert" ) @@ -97,7 +96,7 @@ func Test_galleryQueryBuilder_Create(t *testing.T) { Rating: &rating, Organized: true, StudioID: &studioIDs[studioIdxWithScene], - Files: models.NewRelatedFiles([]file.File{ + Files: models.NewRelatedFiles([]models.File{ galleryFile, }), CreatedAt: createdAt, @@ -145,9 +144,9 @@ func Test_galleryQueryBuilder_Create(t *testing.T) { assert := assert.New(t) s := tt.newObject - var fileIDs []file.ID + var fileIDs []models.FileID if s.Files.Loaded() { - fileIDs = []file.ID{s.Files.List()[0].Base().ID} + fileIDs = []models.FileID{s.Files.List()[0].Base().ID} } if err := qb.Create(ctx, &s, fileIDs); (err != nil) != tt.wantErr { @@ -195,7 +194,7 @@ func Test_galleryQueryBuilder_Create(t *testing.T) { } } -func makeGalleryFileWithID(i int) *file.BaseFile { +func makeGalleryFileWithID(i int) *models.BaseFile { ret := 
makeGalleryFile(i) ret.ID = galleryFileIDs[i] return ret @@ -229,7 +228,7 @@ func Test_galleryQueryBuilder_Update(t *testing.T) { Rating: &rating, Organized: true, StudioID: &studioIDs[studioIdxWithScene], - Files: models.NewRelatedFiles([]file.File{ + Files: models.NewRelatedFiles([]models.File{ makeGalleryFileWithID(galleryIdxWithScene), }), CreatedAt: createdAt, @@ -449,7 +448,7 @@ func Test_galleryQueryBuilder_UpdatePartial(t *testing.T) { Rating: &rating, Organized: true, StudioID: &studioIDs[studioIdxWithGallery], - Files: models.NewRelatedFiles([]file.File{ + Files: models.NewRelatedFiles([]models.File{ makeGalleryFile(galleryIdxWithImage), }), CreatedAt: createdAt, @@ -466,7 +465,7 @@ func Test_galleryQueryBuilder_UpdatePartial(t *testing.T) { clearGalleryPartial(), models.Gallery{ ID: galleryIDs[galleryIdxWithImage], - Files: models.NewRelatedFiles([]file.File{ + Files: models.NewRelatedFiles([]models.File{ makeGalleryFile(galleryIdxWithImage), }), SceneIDs: models.NewRelatedIDs([]int{}), @@ -844,7 +843,7 @@ func makeGalleryWithID(index int) *models.Gallery { ret := makeGallery(index, includeScenes) ret.ID = galleryIDs[index] - ret.Files = models.NewRelatedFiles([]file.File{makeGalleryFile(index)}) + ret.Files = models.NewRelatedFiles([]models.File{makeGalleryFile(index)}) return ret } @@ -1281,7 +1280,7 @@ func galleriesToIDs(i []*models.Gallery) []int { func Test_galleryStore_FindByFileID(t *testing.T) { tests := []struct { name string - fileID file.ID + fileID models.FileID include []int exclude []int }{ @@ -1330,7 +1329,7 @@ func Test_galleryStore_FindByFileID(t *testing.T) { func Test_galleryStore_FindByFolderID(t *testing.T) { tests := []struct { name string - folderID file.FolderID + folderID models.FolderID include []int exclude []int }{ diff --git a/pkg/sqlite/image.go b/pkg/sqlite/image.go index 20e7801d8bc..35982642695 100644 --- a/pkg/sqlite/image.go +++ b/pkg/sqlite/image.go @@ -8,7 +8,6 @@ import ( "path/filepath" "github.com/jmoiron/sqlx" - 
"github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/sliceutil/intslice" "gopkg.in/guregu/null.v4" @@ -150,7 +149,7 @@ func (qb *ImageStore) selectDataset() *goqu.SelectDataset { checksum, goqu.On( checksum.Col(fileIDColumn).Eq(imagesFilesJoinTable.Col(fileIDColumn)), - checksum.Col("type").Eq(file.FingerprintTypeMD5), + checksum.Col("type").Eq(models.FingerprintTypeMD5), ), ).Select( qb.table().All(), @@ -271,7 +270,7 @@ func (qb *ImageStore) Update(ctx context.Context, updatedObject *models.Image) e } if updatedObject.Files.Loaded() { - fileIDs := make([]file.ID, len(updatedObject.Files.List())) + fileIDs := make([]models.FileID, len(updatedObject.Files.List())) for i, f := range updatedObject.Files.List() { fileIDs[i] = f.Base().ID } @@ -389,7 +388,7 @@ func (qb *ImageStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]*mo return ret, nil } -func (qb *ImageStore) GetFiles(ctx context.Context, id int) ([]file.File, error) { +func (qb *ImageStore) GetFiles(ctx context.Context, id int) ([]models.File, error) { fileIDs, err := qb.filesRepository().get(ctx, id) if err != nil { return nil, err @@ -404,12 +403,12 @@ func (qb *ImageStore) GetFiles(ctx context.Context, id int) ([]file.File, error) return files, nil } -func (qb *ImageStore) GetManyFileIDs(ctx context.Context, ids []int) ([][]file.ID, error) { +func (qb *ImageStore) GetManyFileIDs(ctx context.Context, ids []int) ([][]models.FileID, error) { const primaryOnly = false return qb.filesRepository().getMany(ctx, ids, primaryOnly) } -func (qb *ImageStore) FindByFileID(ctx context.Context, fileID file.ID) ([]*models.Image, error) { +func (qb *ImageStore) FindByFileID(ctx context.Context, fileID models.FileID) ([]*models.Image, error) { table := qb.table() sq := dialect.From(table). 
@@ -427,14 +426,14 @@ func (qb *ImageStore) FindByFileID(ctx context.Context, fileID file.ID) ([]*mode return ret, nil } -func (qb *ImageStore) CountByFileID(ctx context.Context, fileID file.ID) (int, error) { +func (qb *ImageStore) CountByFileID(ctx context.Context, fileID models.FileID) (int, error) { joinTable := imagesFilesJoinTable q := dialect.Select(goqu.COUNT("*")).From(joinTable).Where(joinTable.Col(fileIDColumn).Eq(fileID)) return count(ctx, q) } -func (qb *ImageStore) FindByFingerprints(ctx context.Context, fp []file.Fingerprint) ([]*models.Image, error) { +func (qb *ImageStore) FindByFingerprints(ctx context.Context, fp []models.Fingerprint) ([]*models.Image, error) { table := qb.table() fingerprintTable := fingerprintTableMgr.table @@ -467,9 +466,9 @@ func (qb *ImageStore) FindByFingerprints(ctx context.Context, fp []file.Fingerpr } func (qb *ImageStore) FindByChecksum(ctx context.Context, checksum string) ([]*models.Image, error) { - return qb.FindByFingerprints(ctx, []file.Fingerprint{ + return qb.FindByFingerprints(ctx, []models.Fingerprint{ { - Type: file.FingerprintTypeMD5, + Type: models.FingerprintTypeMD5, Fingerprint: checksum, }, }) @@ -523,7 +522,7 @@ func (qb *ImageStore) OCountByPerformerID(ctx context.Context, performerID int) return ret, nil } -func (qb *ImageStore) FindByFolderID(ctx context.Context, folderID file.FolderID) ([]*models.Image, error) { +func (qb *ImageStore) FindByFolderID(ctx context.Context, folderID models.FolderID) ([]*models.Image, error) { table := qb.table() fileTable := goqu.T(fileTable) @@ -548,7 +547,7 @@ func (qb *ImageStore) FindByFolderID(ctx context.Context, folderID file.FolderID return ret, nil } -func (qb *ImageStore) FindByZipFileID(ctx context.Context, zipFileID file.ID) ([]*models.Image, error) { +func (qb *ImageStore) FindByZipFileID(ctx context.Context, zipFileID models.FileID) ([]*models.Image, error) { table := qb.table() fileTable := goqu.T(fileTable) @@ -1043,9 +1042,9 @@ func (qb *ImageStore) 
filesRepository() *filesRepository { } } -func (qb *ImageStore) AddFileID(ctx context.Context, id int, fileID file.ID) error { +func (qb *ImageStore) AddFileID(ctx context.Context, id int, fileID models.FileID) error { const firstPrimary = false - return imagesFilesTableMgr.insertJoins(ctx, id, firstPrimary, []file.ID{fileID}) + return imagesFilesTableMgr.insertJoins(ctx, id, firstPrimary, []models.FileID{fileID}) } func (qb *ImageStore) GetGalleryIDs(ctx context.Context, imageID int) ([]int, error) { diff --git a/pkg/sqlite/image_test.go b/pkg/sqlite/image_test.go index 4f3ebcc22ce..900c0b79456 100644 --- a/pkg/sqlite/image_test.go +++ b/pkg/sqlite/image_test.go @@ -10,7 +10,6 @@ import ( "testing" "time" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stretchr/testify/assert" ) @@ -97,8 +96,8 @@ func Test_imageQueryBuilder_Create(t *testing.T) { Organized: true, OCounter: ocounter, StudioID: &studioIDs[studioIdxWithImage], - Files: models.NewRelatedFiles([]file.File{ - imageFile.(*file.ImageFile), + Files: models.NewRelatedFiles([]models.File{ + imageFile.(*models.ImageFile), }), PrimaryFileID: &imageFile.Base().ID, Path: imageFile.Base().Path, @@ -146,7 +145,7 @@ func Test_imageQueryBuilder_Create(t *testing.T) { runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { assert := assert.New(t) - var fileIDs []file.ID + var fileIDs []models.FileID if tt.newObject.Files.Loaded() { for _, f := range tt.newObject.Files.List() { fileIDs = append(fileIDs, f.Base().ID) @@ -205,7 +204,7 @@ func clearImageFileIDs(image *models.Image) { } } -func makeImageFileWithID(i int) *file.ImageFile { +func makeImageFileWithID(i int) *models.ImageFile { ret := makeImageFile(i) ret.ID = imageFileIDs[i] return ret @@ -444,7 +443,7 @@ func Test_imageQueryBuilder_UpdatePartial(t *testing.T) { Organized: true, OCounter: ocounter, StudioID: &studioIDs[studioIdxWithImage], - Files: models.NewRelatedFiles([]file.File{ + Files: 
models.NewRelatedFiles([]models.File{ makeImageFile(imageIdx1WithGallery), }), CreatedAt: createdAt, @@ -462,7 +461,7 @@ func Test_imageQueryBuilder_UpdatePartial(t *testing.T) { models.Image{ ID: imageIDs[imageIdx1WithGallery], OCounter: getOCounter(imageIdx1WithGallery), - Files: models.NewRelatedFiles([]file.File{ + Files: models.NewRelatedFiles([]models.File{ makeImageFile(imageIdx1WithGallery), }), GalleryIDs: models.NewRelatedIDs([]int{}), @@ -965,7 +964,7 @@ func makeImageWithID(index int) *models.Image { ret := makeImage(index) ret.ID = imageIDs[index] - ret.Files = models.NewRelatedFiles([]file.File{makeImageFile(index)}) + ret.Files = models.NewRelatedFiles([]models.File{makeImageFile(index)}) return ret } @@ -1153,15 +1152,15 @@ func Test_imageQueryBuilder_FindByFingerprints(t *testing.T) { tests := []struct { name string - fingerprints []file.Fingerprint + fingerprints []models.Fingerprint want []*models.Image wantErr bool }{ { "valid", - []file.Fingerprint{ + []models.Fingerprint{ { - Type: file.FingerprintTypeMD5, + Type: models.FingerprintTypeMD5, Fingerprint: getChecksum(imageIdxWithGallery), }, }, @@ -1170,9 +1169,9 @@ func Test_imageQueryBuilder_FindByFingerprints(t *testing.T) { }, { "invalid", - []file.Fingerprint{ + []models.Fingerprint{ { - Type: file.FingerprintTypeMD5, + Type: models.FingerprintTypeMD5, Fingerprint: "invalid checksum", }, }, @@ -1181,9 +1180,9 @@ func Test_imageQueryBuilder_FindByFingerprints(t *testing.T) { }, { "with performers", - []file.Fingerprint{ + []models.Fingerprint{ { - Type: file.FingerprintTypeMD5, + Type: models.FingerprintTypeMD5, Fingerprint: getChecksum(imageIdxWithTwoPerformers), }, }, @@ -1192,9 +1191,9 @@ func Test_imageQueryBuilder_FindByFingerprints(t *testing.T) { }, { "with tags", - []file.Fingerprint{ + []models.Fingerprint{ { - Type: file.FingerprintTypeMD5, + Type: models.FingerprintTypeMD5, Fingerprint: getChecksum(imageIdxWithTwoTags), }, }, @@ -1316,7 +1315,7 @@ func imagesToIDs(i 
[]*models.Image) []int { func Test_imageStore_FindByFileID(t *testing.T) { tests := []struct { name string - fileID file.ID + fileID models.FileID include []int exclude []int }{ @@ -1365,7 +1364,7 @@ func Test_imageStore_FindByFileID(t *testing.T) { func Test_imageStore_FindByFolderID(t *testing.T) { tests := []struct { name string - folderID file.FolderID + folderID models.FolderID include []int exclude []int }{ @@ -1420,7 +1419,7 @@ func Test_imageStore_FindByFolderID(t *testing.T) { func Test_imageStore_FindByZipFileID(t *testing.T) { tests := []struct { name string - zipFileID file.ID + zipFileID models.FileID include []int exclude []int }{ @@ -1868,11 +1867,12 @@ func verifyImagesResolution(t *testing.T, resolution models.ResolutionEnum) { t.Errorf("Error loading primary file: %s", err.Error()) return nil } - asFrame, ok := image.Files.Primary().(file.VisualFile) + f := image.Files.Primary() + vf, ok := f.(models.VisualFile) if !ok { - t.Errorf("Error: Associated primary file of image is not of type VisualFile") + t.Errorf("Error: image primary file is not a visual file (is type %T)", f) } - verifyImageResolution(t, asFrame.GetHeight(), resolution) + verifyImageResolution(t, vf.GetHeight(), resolution) } return nil diff --git a/pkg/sqlite/migrations/49_postmigrate.go b/pkg/sqlite/migrations/49_postmigrate.go new file mode 100644 index 00000000000..d500d470763 --- /dev/null +++ b/pkg/sqlite/migrations/49_postmigrate.go @@ -0,0 +1,417 @@ +package migrations + +import ( + "context" + "encoding/json" + "fmt" + "reflect" + "strconv" + "strings" + + "github.com/jmoiron/sqlx" + "github.com/stashapp/stash/pkg/logger" + "github.com/stashapp/stash/pkg/models" + "github.com/stashapp/stash/pkg/sqlite" +) + +var migrate49TypeResolution = map[string][]string{ + "Boolean": { + /* + "organized", + "interactive", + "ignore_auto_tag", + "performer_favorite", + "filter_favorites", + */ + }, + "Int": { + "id", + "rating", + "rating100", + "o_counter", + "duration", + "tag_count", 
+ "age", + "height", + "height_cm", + "weight", + "scene_count", + "marker_count", + "image_count", + "gallery_count", + "performer_count", + "interactive_speed", + "resume_time", + "play_count", + "play_duration", + "parent_count", + "child_count", + "performer_age", + "file_count", + }, + "Float": { + "penis_length", + }, + "Object": { + "tags", + "performers", + "studios", + "movies", + "galleries", + "parents", + "children", + "scene_tags", + "performer_tags", + }, +} +var migrate49NameChanges = map[string]string{ + "rating": "rating100", + "parent_studios": "parents", + "child_studios": "children", + "parent_tags": "parents", + "child_tags": "children", + "child_tag_count": "child_count", + "parent_tag_count": "parent_count", + "height": "height_cm", + "imageIsMissing": "is_missing", + "sceneIsMissing": "is_missing", + "galleryIsMissing": "is_missing", + "performerIsMissing": "is_missing", + "tagIsMissing": "is_missing", + "studioIsMissing": "is_missing", + "movieIsMissing": "is_missing", + "favorite": "filter_favorites", + "hasMarkers": "has_markers", + "parentTags": "parents", + "childTags": "children", + "phash": "phash_distance", + "scene_code": "code", + "hasChapters": "has_chapters", + "sceneChecksum": "checksum", + "galleryChecksum": "checksum", + "sceneTags": "scene_tags", + "performerTags": "performer_tags", +} + +func post49(ctx context.Context, db *sqlx.DB) error { + logger.Info("Running post-migration for schema version 49") + + m := schema49Migrator{ + migrator: migrator{ + db: db, + }, + } + + return m.migrateSavedFilters(ctx) +} + +type schema49Migrator struct { + migrator +} + +func (m *schema49Migrator) migrateSavedFilters(ctx context.Context) error { + if err := m.withTxn(ctx, func(tx *sqlx.Tx) error { + rows, err := m.db.Query("SELECT id, mode, find_filter FROM saved_filters ORDER BY id") + if err != nil { + return err + } + defer rows.Close() + + for rows.Next() { + var ( + id int + mode models.FilterMode + findFilter string + ) + + err := 
rows.Scan(&id, &mode, &findFilter) + if err != nil { + return err + } + + asRawMessage := json.RawMessage(findFilter) + + newFindFilter, err := m.getFindFilter(asRawMessage) + if err != nil { + return fmt.Errorf("failed to get find filter for saved filter %d: %w", id, err) + } + + objectFilter, err := m.getObjectFilter(mode, asRawMessage) + if err != nil { + return fmt.Errorf("failed to get object filter for saved filter %d: %w", id, err) + } + + uiOptions, err := m.getDisplayOptions(asRawMessage) + if err != nil { + return fmt.Errorf("failed to get display options for saved filter %d: %w", id, err) + } + + _, err = m.db.Exec("UPDATE saved_filters SET find_filter = ?, object_filter = ?, ui_options = ? WHERE id = ?", newFindFilter, objectFilter, uiOptions, id) + if err != nil { + return fmt.Errorf("failed to update saved filter %d: %w", id, err) + } + } + + return rows.Err() + }); err != nil { + return err + } + + return nil +} + +func (m *schema49Migrator) getDisplayOptions(data json.RawMessage) (json.RawMessage, error) { + type displayOptions struct { + DisplayMode *int `json:"disp"` + ZoomIndex *int `json:"z"` + } + + var opts displayOptions + if err := json.Unmarshal(data, &opts); err != nil { + return nil, fmt.Errorf("failed to unmarshal display options: %w", err) + } + + ret := make(map[string]interface{}) + if opts.DisplayMode != nil { + ret["display_mode"] = *opts.DisplayMode + } + if opts.ZoomIndex != nil { + ret["zoom_index"] = *opts.ZoomIndex + } + + return json.Marshal(ret) +} + +func (m *schema49Migrator) getFindFilter(data json.RawMessage) (json.RawMessage, error) { + type findFilterJson struct { + Q *string `json:"q"` + Page *int `json:"page"` + PerPage *int `json:"perPage"` + Sort *string `json:"sortby"` + Direction *string `json:"sortdir"` + } + + ppDefault := 40 + pageDefault := 1 + qDefault := "" + sortDefault := "date" + asc := "asc" + ff := findFilterJson{Q: &qDefault, Page: &pageDefault, PerPage: &ppDefault, Sort: &sortDefault, Direction: &asc} 
+ if err := json.Unmarshal(data, &ff); err != nil { + return nil, fmt.Errorf("failed to unmarshal find filter: %w", err) + } + + newDir := strings.ToUpper(*ff.Direction) + ff.Direction = &newDir + + type findFilterRewrite struct { + Q *string `json:"q"` + Page *int `json:"page"` + PerPage *int `json:"per_page"` + Sort *string `json:"sort"` + Direction *string `json:"direction"` + } + + fr := findFilterRewrite(ff) + + return json.Marshal(fr) +} + +func (m *schema49Migrator) getObjectFilter(mode models.FilterMode, data json.RawMessage) (json.RawMessage, error) { + type criteriaJson struct { + Criteria []string `json:"c"` + } + + var c criteriaJson + if err := json.Unmarshal(data, &c); err != nil { + return nil, fmt.Errorf("failed to unmarshal object filter: %w", err) + } + + ret := make(map[string]interface{}) + for _, raw := range c.Criteria { + if err := m.convertCriterion(mode, ret, raw); err != nil { + return nil, err + } + } + + return json.Marshal(ret) +} + +func (m *schema49Migrator) convertCriterion(mode models.FilterMode, out map[string]interface{}, criterion string) error { + // convert to a map + ret := make(map[string]interface{}) + + if err := json.Unmarshal([]byte(criterion), &ret); err != nil { + return fmt.Errorf("failed to unmarshal criterion: %w", err) + } + + field := ret["type"].(string) + // Some names are deprecated + if newFieldName, ok := migrate49NameChanges[field]; ok { + field = newFieldName + } + delete(ret, "type") + + // Find out whether the object needs some adjustment/has non-string content attached + // Only adjust if value is present + if v, ok := ret["value"]; ok && v != nil { + var err error + switch { + case arrayContains(migrate49TypeResolution["Boolean"], field): + ret["value"], err = m.adjustCriterionValue(ret["value"], "bool") + case arrayContains(migrate49TypeResolution["Int"], field): + ret["value"], err = m.adjustCriterionValue(ret["value"], "int") + case arrayContains(migrate49TypeResolution["Float"], field): + 
ret["value"], err = m.adjustCriterionValue(ret["value"], "float64") + case arrayContains(migrate49TypeResolution["Object"], field): + ret["value"], err = m.adjustCriterionValue(ret["value"], "object") + } + + if err != nil { + return fmt.Errorf("failed to adjust criterion value for %q: %w", field, err) + } + } + + out[field] = ret + + return nil +} + +func arrayContains(sl []string, name string) bool { + for _, value := range sl { + if value == name { + return true + } + } + return false +} + +// General Function for converting the types inside a criterion +func (m *schema49Migrator) adjustCriterionValue(value interface{}, typ string) (interface{}, error) { + if mapvalue, ok := value.(map[string]interface{}); ok { + // Primitive values and lists of them + var err error + for _, next := range []string{"value", "value2"} { + if valmap, ok := mapvalue[next].([]string); ok { + var valNewMap []interface{} + for index, v := range valmap { + valNewMap[index], err = m.convertValue(v, typ) + if err != nil { + return nil, err + } + } + mapvalue[next] = valNewMap + } else if _, ok := mapvalue[next]; ok { + mapvalue[next], err = m.convertValue(mapvalue[next], typ) + if err != nil { + return nil, err + } + } + } + // Items + for _, next := range []string{"items", "excluded"} { + if _, ok := mapvalue[next]; ok { + mapvalue[next], err = m.adjustCriterionItem(mapvalue[next]) + if err != nil { + return nil, err + } + } + } + + // Those Values are always Int + for _, next := range []string{"Distance", "Depth"} { + if _, ok := mapvalue[next]; ok { + mapvalue[next], err = strconv.ParseInt(mapvalue[next].(string), 10, 64) + if err != nil { + return nil, err + } + } + } + return mapvalue, nil + } else if _, ok := value.(string); ok { + // Singular Primitive Values + return m.convertValue(value, typ) + } else if listvalue, ok := value.([]interface{}); ok { + // Items as a singular value, as well as singular lists + var err error + if typ == "object" { + value, err = 
m.adjustCriterionItem(value) + if err != nil { + return nil, err + } + } else { + for index, val := range listvalue { + listvalue[index], err = m.convertValue(val, typ) + if err != nil { + return nil, err + } + } + value = listvalue + } + + return value, nil + } else if _, ok := value.(int); ok { + return value, nil + } + + return nil, fmt.Errorf("could not recognize format of value %v", value) +} + +// Converts values inside a criterion that represent some objects, like performer or studio. +func (m *schema49Migrator) adjustCriterionItem(value interface{}) (interface{}, error) { + // Basically, this first converts step by step the value, after that it adjusts id and Depth (of parent/child studios) to int + if itemlist, ok := value.([]interface{}); ok { + var itemNewList []interface{} + for _, val := range itemlist { + if val, ok := val.(map[string]interface{}); ok { + newItem := make(map[string]interface{}) + for index, v := range val { + if v, ok := v.(string); ok { + switch index { + case "id": + if formattedOut, ok := strconv.ParseInt(v, 10, 64); ok == nil { + newItem["id"] = formattedOut + } + case "Depth": + if formattedOut, ok := strconv.ParseInt(v, 10, 64); ok == nil { + newItem["Depth"] = formattedOut + } + default: + newItem[index] = v + } + } + } + itemNewList = append(itemNewList, newItem) + } + } + return itemNewList, nil + } + return nil, fmt.Errorf("could not recognize %v as an item list", value) +} + +// Converts a value of type string to its according type, given by string +func (m *schema49Migrator) convertValue(value interface{}, typ string) (interface{}, error) { + valueType := reflect.TypeOf(value).Name() + if typ == valueType || (typ == "int" && valueType == "float64") || (typ == "float64" && valueType == "int") { + return value, nil + } + + if val, ok := value.(string); ok { + switch typ { + case "float64": + return strconv.ParseFloat(val, 64) + case "int": + return strconv.ParseInt(val, 10, 64) + case "bool": + return strconv.ParseBool(val) 
+ default: + return nil, fmt.Errorf("no valid conversion type for %v, need bool, int or float64", typ) + } + } + + return nil, fmt.Errorf("cannot convert %v (%T) to %s", value, value, typ) +} + +func init() { + sqlite.RegisterPostMigration(49, post49) +} diff --git a/pkg/sqlite/migrations/49_saved_filter_refactor.up.sql b/pkg/sqlite/migrations/49_saved_filter_refactor.up.sql new file mode 100644 index 00000000000..c769a9e4b8e --- /dev/null +++ b/pkg/sqlite/migrations/49_saved_filter_refactor.up.sql @@ -0,0 +1,34 @@ +PRAGMA foreign_keys=OFF; + +-- remove filter column +CREATE TABLE `saved_filters_new` ( + `id` integer not null primary key autoincrement, + `name` varchar(510) not null, + `mode` varchar(255) not null, + `find_filter` blob, + `object_filter` blob, + `ui_options` blob +); + +-- move filter data into find_filter to be migrated in the post-migration +INSERT INTO `saved_filters_new` + ( + `id`, + `name`, + `mode`, + `find_filter` + ) + SELECT + `id`, + `name`, + `mode`, + `filter` + FROM `saved_filters`; + +DROP INDEX `index_saved_filters_on_mode_name_unique`; +DROP TABLE `saved_filters`; +ALTER TABLE `saved_filters_new` rename to `saved_filters`; + +CREATE UNIQUE INDEX `index_saved_filters_on_mode_name_unique` on `saved_filters` (`mode`, `name`); + +PRAGMA foreign_keys=ON; diff --git a/pkg/sqlite/movies_test.go b/pkg/sqlite/movies_test.go index ed0ef724291..9b9615fbd90 100644 --- a/pkg/sqlite/movies_test.go +++ b/pkg/sqlite/movies_test.go @@ -291,7 +291,7 @@ func TestMovieUpdateFrontImage(t *testing.T) { // create movie to test against const name = "TestMovieUpdateMovieImages" movie := models.Movie{ - Name: name, + Name: name, } err := qb.Create(ctx, &movie) if err != nil { @@ -311,7 +311,7 @@ func TestMovieUpdateBackImage(t *testing.T) { // create movie to test against const name = "TestMovieUpdateMovieImages" movie := models.Movie{ - Name: name, + Name: name, } err := qb.Create(ctx, &movie) if err != nil { diff --git a/pkg/sqlite/repository.go 
b/pkg/sqlite/repository.go index 2292e868a62..c65965fe787 100644 --- a/pkg/sqlite/repository.go +++ b/pkg/sqlite/repository.go @@ -9,7 +9,6 @@ import ( "github.com/jmoiron/sqlx" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" ) @@ -336,7 +335,7 @@ type captionRepository struct { repository } -func (r *captionRepository) get(ctx context.Context, id file.ID) ([]*models.VideoCaption, error) { +func (r *captionRepository) get(ctx context.Context, id models.FileID) ([]*models.VideoCaption, error) { query := fmt.Sprintf("SELECT %s, %s, %s from %s WHERE %s = ?", captionCodeColumn, captionFilenameColumn, captionTypeColumn, r.tableName, r.idColumn) var ret []*models.VideoCaption err := r.queryFunc(ctx, query, []interface{}{id}, false, func(rows *sqlx.Rows) error { @@ -359,12 +358,12 @@ func (r *captionRepository) get(ctx context.Context, id file.ID) ([]*models.Vide return ret, err } -func (r *captionRepository) insert(ctx context.Context, id file.ID, caption *models.VideoCaption) (sql.Result, error) { +func (r *captionRepository) insert(ctx context.Context, id models.FileID, caption *models.VideoCaption) (sql.Result, error) { stmt := fmt.Sprintf("INSERT INTO %s (%s, %s, %s, %s) VALUES (?, ?, ?, ?)", r.tableName, r.idColumn, captionCodeColumn, captionFilenameColumn, captionTypeColumn) return r.tx.Exec(ctx, stmt, id, caption.LanguageCode, caption.Filename, caption.CaptionType) } -func (r *captionRepository) replace(ctx context.Context, id file.ID, captions []*models.VideoCaption) error { +func (r *captionRepository) replace(ctx context.Context, id models.FileID, captions []*models.VideoCaption) error { if err := r.destroy(ctx, []int{int(id)}); err != nil { return err } @@ -443,12 +442,12 @@ type filesRepository struct { } type relatedFileRow struct { - ID int `db:"id"` - FileID file.ID `db:"file_id"` - Primary bool `db:"primary"` + ID int `db:"id"` + FileID models.FileID `db:"file_id"` + Primary bool `db:"primary"` } -func (r *filesRepository) 
getMany(ctx context.Context, ids []int, primaryOnly bool) ([][]file.ID, error) { +func (r *filesRepository) getMany(ctx context.Context, ids []int, primaryOnly bool) ([][]models.FileID, error) { var primaryClause string if primaryOnly { primaryClause = " AND `primary` = 1" @@ -476,7 +475,7 @@ func (r *filesRepository) getMany(ctx context.Context, ids []int, primaryOnly bo return nil, err } - ret := make([][]file.ID, len(ids)) + ret := make([][]models.FileID, len(ids)) idToIndex := make(map[int]int) for i, id := range ids { idToIndex[id] = i @@ -488,7 +487,7 @@ func (r *filesRepository) getMany(ctx context.Context, ids []int, primaryOnly bo if row.Primary { // prepend to list - ret[idToIndex[id]] = append([]file.ID{fileID}, ret[idToIndex[id]]...) + ret[idToIndex[id]] = append([]models.FileID{fileID}, ret[idToIndex[id]]...) } else { ret[idToIndex[id]] = append(ret[idToIndex[id]], row.FileID) } @@ -497,15 +496,15 @@ func (r *filesRepository) getMany(ctx context.Context, ids []int, primaryOnly bo return ret, nil } -func (r *filesRepository) get(ctx context.Context, id int) ([]file.ID, error) { +func (r *filesRepository) get(ctx context.Context, id int) ([]models.FileID, error) { query := fmt.Sprintf("SELECT file_id, `primary` from %s WHERE %s = ?", r.tableName, r.idColumn) type relatedFile struct { - FileID file.ID `db:"file_id"` - Primary bool `db:"primary"` + FileID models.FileID `db:"file_id"` + Primary bool `db:"primary"` } - var ret []file.ID + var ret []models.FileID if err := r.queryFunc(ctx, query, []interface{}{id}, false, func(rows *sqlx.Rows) error { var f relatedFile @@ -515,7 +514,7 @@ func (r *filesRepository) get(ctx context.Context, id int) ([]file.ID, error) { if f.Primary { // prepend to list - ret = append([]file.ID{f.FileID}, ret...) + ret = append([]models.FileID{f.FileID}, ret...) 
} else { ret = append(ret, f.FileID) } diff --git a/pkg/sqlite/saved_filter.go b/pkg/sqlite/saved_filter.go index f4b55fe72ef..6b92b7657b4 100644 --- a/pkg/sqlite/saved_filter.go +++ b/pkg/sqlite/saved_filter.go @@ -3,6 +3,7 @@ package sqlite import ( "context" "database/sql" + "encoding/json" "errors" "fmt" @@ -10,6 +11,7 @@ import ( "github.com/doug-martin/goqu/v9/exp" "github.com/jmoiron/sqlx" + "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/sliceutil/intslice" ) @@ -20,25 +22,67 @@ const ( ) type savedFilterRow struct { - ID int `db:"id" goqu:"skipinsert"` - Mode string `db:"mode"` - Name string `db:"name"` - Filter string `db:"filter"` + ID int `db:"id" goqu:"skipinsert"` + Mode models.FilterMode `db:"mode"` + Name string `db:"name"` + FindFilter string `db:"find_filter"` + ObjectFilter string `db:"object_filter"` + UIOptions string `db:"ui_options"` +} + +func encodeJSONOrEmpty(v interface{}) string { + if v == nil { + return "" + } + + encoded, err := json.Marshal(v) + if err != nil { + logger.Errorf("error encoding json %v: %v", v, err) + } + + return string(encoded) +} + +func decodeJSON(s string, v interface{}) { + if s == "" { + return + } + + if err := json.Unmarshal([]byte(s), v); err != nil { + logger.Errorf("error decoding json %q: %v", s, err) + } } func (r *savedFilterRow) fromSavedFilter(o models.SavedFilter) { r.ID = o.ID - r.Mode = string(o.Mode) + r.Mode = o.Mode r.Name = o.Name - r.Filter = o.Filter + + // encode the filters as json + r.FindFilter = encodeJSONOrEmpty(o.FindFilter) + r.ObjectFilter = encodeJSONOrEmpty(o.ObjectFilter) + r.UIOptions = encodeJSONOrEmpty(o.UIOptions) } func (r *savedFilterRow) resolve() *models.SavedFilter { ret := &models.SavedFilter{ - ID: r.ID, - Name: r.Name, - Mode: models.FilterMode(r.Mode), - Filter: r.Filter, + ID: r.ID, + Mode: r.Mode, + Name: r.Name, + } + + // decode the filters from json + if r.FindFilter != "" { + ret.FindFilter = 
&models.FindFilterType{} + decodeJSON(r.FindFilter, &ret.FindFilter) + } + if r.ObjectFilter != "" { + ret.ObjectFilter = make(map[string]interface{}) + decodeJSON(r.ObjectFilter, &ret.ObjectFilter) + } + if r.UIOptions != "" { + ret.UIOptions = make(map[string]interface{}) + decodeJSON(r.UIOptions, &ret.UIOptions) } return ret @@ -46,7 +90,6 @@ func (r *savedFilterRow) resolve() *models.SavedFilter { type SavedFilterStore struct { repository - tableMgr *table } @@ -77,7 +120,7 @@ func (qb *SavedFilterStore) Create(ctx context.Context, newObject *models.SavedF return err } - updated, err := qb.find(ctx, id) + updated, err := qb.Find(ctx, id) if err != nil { return fmt.Errorf("finding after create: %w", err) } @@ -166,7 +209,6 @@ func (qb *SavedFilterStore) find(ctx context.Context, id int) (*models.SavedFilt return ret, nil } -// returns nil, sql.ErrNoRows if not found func (qb *SavedFilterStore) get(ctx context.Context, q *goqu.SelectDataset) (*models.SavedFilter, error) { ret, err := qb.getMany(ctx, q) if err != nil { diff --git a/pkg/sqlite/saved_filter_test.go b/pkg/sqlite/saved_filter_test.go index 0a6e32a1ca4..aa98121fd45 100644 --- a/pkg/sqlite/saved_filter_test.go +++ b/pkg/sqlite/saved_filter_test.go @@ -42,15 +42,35 @@ func TestSavedFilterFindByMode(t *testing.T) { func TestSavedFilterDestroy(t *testing.T) { const filterName = "filterToDestroy" - const testFilter = "{}" + filterQ := "" + filterPage := 1 + filterPerPage := 40 + filterSort := "date" + filterDirection := models.SortDirectionEnumAsc + findFilter := models.FindFilterType{ + Q: &filterQ, + Page: &filterPage, + PerPage: &filterPerPage, + Sort: &filterSort, + Direction: &filterDirection, + } + objectFilter := map[string]interface{}{ + "test": "foo", + } + uiOptions := map[string]interface{}{ + "display_mode": 1, + "zoom_index": 1, + } var id int // create the saved filter to destroy withTxn(func(ctx context.Context) error { newFilter := models.SavedFilter{ - Name: filterName, - Mode: 
models.FilterModeScenes, - Filter: testFilter, + Name: filterName, + Mode: models.FilterModeScenes, + FindFilter: &findFilter, + ObjectFilter: objectFilter, + UIOptions: uiOptions, } err := db.SavedFilter.Create(ctx, &newFilter) @@ -88,12 +108,32 @@ func TestSavedFilterFindDefault(t *testing.T) { } func TestSavedFilterSetDefault(t *testing.T) { - const newFilter = "foo" + filterQ := "" + filterPage := 1 + filterPerPage := 40 + filterSort := "date" + filterDirection := models.SortDirectionEnumAsc + findFilter := models.FindFilterType{ + Q: &filterQ, + Page: &filterPage, + PerPage: &filterPerPage, + Sort: &filterSort, + Direction: &filterDirection, + } + objectFilter := map[string]interface{}{ + "test": "foo", + } + uiOptions := map[string]interface{}{ + "display_mode": 1, + "zoom_index": 1, + } withTxn(func(ctx context.Context) error { err := db.SavedFilter.SetDefault(ctx, &models.SavedFilter{ - Mode: models.FilterModeMovies, - Filter: newFilter, + Mode: models.FilterModeMovies, + FindFilter: &findFilter, + ObjectFilter: objectFilter, + UIOptions: uiOptions, }) return err @@ -104,7 +144,7 @@ func TestSavedFilterSetDefault(t *testing.T) { def, err := db.SavedFilter.FindDefault(ctx, models.FilterModeMovies) if err == nil { defID = def.ID - assert.Equal(t, newFilter, def.Filter) + assert.Equal(t, &findFilter, def.FindFilter) } return err diff --git a/pkg/sqlite/scene.go b/pkg/sqlite/scene.go index 8fc37937b8f..215c1740953 100644 --- a/pkg/sqlite/scene.go +++ b/pkg/sqlite/scene.go @@ -17,7 +17,6 @@ import ( "gopkg.in/guregu/null.v4" "gopkg.in/guregu/null.v4/zero" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/sliceutil/intslice" "github.com/stashapp/stash/pkg/utils" @@ -232,13 +231,13 @@ func (qb *SceneStore) selectDataset() *goqu.SelectDataset { checksum, goqu.On( checksum.Col(fileIDColumn).Eq(scenesFilesJoinTable.Col(fileIDColumn)), - checksum.Col("type").Eq(file.FingerprintTypeMD5), + 
checksum.Col("type").Eq(models.FingerprintTypeMD5), ), ).LeftJoin( oshash, goqu.On( oshash.Col(fileIDColumn).Eq(scenesFilesJoinTable.Col(fileIDColumn)), - oshash.Col("type").Eq(file.FingerprintTypeOshash), + oshash.Col("type").Eq(models.FingerprintTypeOshash), ), ).Select( qb.table().All(), @@ -250,7 +249,7 @@ func (qb *SceneStore) selectDataset() *goqu.SelectDataset { ) } -func (qb *SceneStore) Create(ctx context.Context, newObject *models.Scene, fileIDs []file.ID) error { +func (qb *SceneStore) Create(ctx context.Context, newObject *models.Scene, fileIDs []models.FileID) error { var r sceneRow r.fromScene(*newObject) @@ -411,7 +410,7 @@ func (qb *SceneStore) Update(ctx context.Context, updatedObject *models.Scene) e } if updatedObject.Files.Loaded() { - fileIDs := make([]file.ID, len(updatedObject.Files.List())) + fileIDs := make([]models.FileID, len(updatedObject.Files.List())) for i, f := range updatedObject.Files.List() { fileIDs[i] = f.ID } @@ -538,7 +537,7 @@ func (qb *SceneStore) getMany(ctx context.Context, q *goqu.SelectDataset) ([]*mo return ret, nil } -func (qb *SceneStore) GetFiles(ctx context.Context, id int) ([]*file.VideoFile, error) { +func (qb *SceneStore) GetFiles(ctx context.Context, id int) ([]*models.VideoFile, error) { fileIDs, err := qb.filesRepository().get(ctx, id) if err != nil { return nil, err @@ -550,10 +549,10 @@ func (qb *SceneStore) GetFiles(ctx context.Context, id int) ([]*file.VideoFile, return nil, err } - ret := make([]*file.VideoFile, len(files)) + ret := make([]*models.VideoFile, len(files)) for i, f := range files { var ok bool - ret[i], ok = f.(*file.VideoFile) + ret[i], ok = f.(*models.VideoFile) if !ok { return nil, fmt.Errorf("expected file to be *file.VideoFile not %T", f) } @@ -562,12 +561,12 @@ func (qb *SceneStore) GetFiles(ctx context.Context, id int) ([]*file.VideoFile, return ret, nil } -func (qb *SceneStore) GetManyFileIDs(ctx context.Context, ids []int) ([][]file.ID, error) { +func (qb *SceneStore) 
GetManyFileIDs(ctx context.Context, ids []int) ([][]models.FileID, error) { const primaryOnly = false return qb.filesRepository().getMany(ctx, ids, primaryOnly) } -func (qb *SceneStore) FindByFileID(ctx context.Context, fileID file.ID) ([]*models.Scene, error) { +func (qb *SceneStore) FindByFileID(ctx context.Context, fileID models.FileID) ([]*models.Scene, error) { sq := dialect.From(scenesFilesJoinTable).Select(scenesFilesJoinTable.Col(sceneIDColumn)).Where( scenesFilesJoinTable.Col(fileIDColumn).Eq(fileID), ) @@ -580,7 +579,7 @@ func (qb *SceneStore) FindByFileID(ctx context.Context, fileID file.ID) ([]*mode return ret, nil } -func (qb *SceneStore) FindByPrimaryFileID(ctx context.Context, fileID file.ID) ([]*models.Scene, error) { +func (qb *SceneStore) FindByPrimaryFileID(ctx context.Context, fileID models.FileID) ([]*models.Scene, error) { sq := dialect.From(scenesFilesJoinTable).Select(scenesFilesJoinTable.Col(sceneIDColumn)).Where( scenesFilesJoinTable.Col(fileIDColumn).Eq(fileID), scenesFilesJoinTable.Col("primary").Eq(1), @@ -594,14 +593,14 @@ func (qb *SceneStore) FindByPrimaryFileID(ctx context.Context, fileID file.ID) ( return ret, nil } -func (qb *SceneStore) CountByFileID(ctx context.Context, fileID file.ID) (int, error) { +func (qb *SceneStore) CountByFileID(ctx context.Context, fileID models.FileID) (int, error) { joinTable := scenesFilesJoinTable q := dialect.Select(goqu.COUNT("*")).From(joinTable).Where(joinTable.Col(fileIDColumn).Eq(fileID)) return count(ctx, q) } -func (qb *SceneStore) FindByFingerprints(ctx context.Context, fp []file.Fingerprint) ([]*models.Scene, error) { +func (qb *SceneStore) FindByFingerprints(ctx context.Context, fp []models.Fingerprint) ([]*models.Scene, error) { fingerprintTable := fingerprintTableMgr.table var ex []exp.Expression @@ -629,18 +628,18 @@ func (qb *SceneStore) FindByFingerprints(ctx context.Context, fp []file.Fingerpr } func (qb *SceneStore) FindByChecksum(ctx context.Context, checksum string) 
([]*models.Scene, error) { - return qb.FindByFingerprints(ctx, []file.Fingerprint{ + return qb.FindByFingerprints(ctx, []models.Fingerprint{ { - Type: file.FingerprintTypeMD5, + Type: models.FingerprintTypeMD5, Fingerprint: checksum, }, }) } func (qb *SceneStore) FindByOSHash(ctx context.Context, oshash string) ([]*models.Scene, error) { - return qb.FindByFingerprints(ctx, []file.Fingerprint{ + return qb.FindByFingerprints(ctx, []models.Fingerprint{ { - Type: file.FingerprintTypeOshash, + Type: models.FingerprintTypeOshash, Fingerprint: oshash, }, }) @@ -1684,7 +1683,7 @@ func (qb *SceneStore) destroyCover(ctx context.Context, sceneID int) error { return qb.DestroyImage(ctx, sceneID, sceneCoverBlobColumn) } -func (qb *SceneStore) AssignFiles(ctx context.Context, sceneID int, fileIDs []file.ID) error { +func (qb *SceneStore) AssignFiles(ctx context.Context, sceneID int, fileIDs []models.FileID) error { // assuming a file can only be assigned to a single scene if err := scenesFilesTableMgr.destroyJoins(ctx, fileIDs); err != nil { return err @@ -1736,9 +1735,9 @@ func (qb *SceneStore) filesRepository() *filesRepository { } } -func (qb *SceneStore) AddFileID(ctx context.Context, id int, fileID file.ID) error { +func (qb *SceneStore) AddFileID(ctx context.Context, id int, fileID models.FileID) error { const firstPrimary = false - return scenesFilesTableMgr.insertJoins(ctx, id, firstPrimary, []file.ID{fileID}) + return scenesFilesTableMgr.insertJoins(ctx, id, firstPrimary, []models.FileID{fileID}) } func (qb *SceneStore) performersRepository() *joinRepository { diff --git a/pkg/sqlite/scene_test.go b/pkg/sqlite/scene_test.go index 8ab34a112f6..0da236f4d7d 100644 --- a/pkg/sqlite/scene_test.go +++ b/pkg/sqlite/scene_test.go @@ -14,7 +14,6 @@ import ( "testing" "time" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/sliceutil/intslice" "github.com/stretchr/testify/assert" @@ -165,8 +164,8 @@ func 
Test_sceneQueryBuilder_Create(t *testing.T) { Organized: true, OCounter: ocounter, StudioID: &studioIDs[studioIdxWithScene], - Files: models.NewRelatedVideoFiles([]*file.VideoFile{ - videoFile.(*file.VideoFile), + Files: models.NewRelatedVideoFiles([]*models.VideoFile{ + videoFile.(*models.VideoFile), }), CreatedAt: createdAt, UpdatedAt: updatedAt, @@ -248,7 +247,7 @@ func Test_sceneQueryBuilder_Create(t *testing.T) { runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { assert := assert.New(t) - var fileIDs []file.ID + var fileIDs []models.FileID if tt.newObject.Files.Loaded() { for _, f := range tt.newObject.Files.List() { fileIDs = append(fileIDs, f.ID) @@ -308,7 +307,7 @@ func clearSceneFileIDs(scene *models.Scene) { } } -func makeSceneFileWithID(i int) *file.VideoFile { +func makeSceneFileWithID(i int) *models.VideoFile { ret := makeSceneFile(i) ret.ID = sceneFileIDs[i] return ret @@ -626,7 +625,7 @@ func Test_sceneQueryBuilder_UpdatePartial(t *testing.T) { }, models.Scene{ ID: sceneIDs[sceneIdxWithSpacedName], - Files: models.NewRelatedVideoFiles([]*file.VideoFile{ + Files: models.NewRelatedVideoFiles([]*models.VideoFile{ makeSceneFile(sceneIdxWithSpacedName), }), Title: title, @@ -678,7 +677,7 @@ func Test_sceneQueryBuilder_UpdatePartial(t *testing.T) { models.Scene{ ID: sceneIDs[sceneIdxWithSpacedName], OCounter: getOCounter(sceneIdxWithSpacedName), - Files: models.NewRelatedVideoFiles([]*file.VideoFile{ + Files: models.NewRelatedVideoFiles([]*models.VideoFile{ makeSceneFile(sceneIdxWithSpacedName), }), GalleryIDs: models.NewRelatedIDs([]int{}), @@ -1460,7 +1459,7 @@ func makeSceneWithID(index int) *models.Scene { ret := makeScene(index) ret.ID = sceneIDs[index] - ret.Files = models.NewRelatedVideoFiles([]*file.VideoFile{makeSceneFile(index)}) + ret.Files = models.NewRelatedVideoFiles([]*models.VideoFile{makeSceneFile(index)}) return ret } @@ -1891,7 +1890,7 @@ func scenesToIDs(i []*models.Scene) []int { func 
Test_sceneStore_FindByFileID(t *testing.T) { tests := []struct { name string - fileID file.ID + fileID models.FileID include []int exclude []int }{ @@ -1940,7 +1939,7 @@ func Test_sceneStore_FindByFileID(t *testing.T) { func Test_sceneStore_CountByFileID(t *testing.T) { tests := []struct { name string - fileID file.ID + fileID models.FileID want int }{ { @@ -3053,8 +3052,8 @@ func queryScenes(ctx context.Context, t *testing.T, queryBuilder models.SceneRea func createScene(ctx context.Context, width int, height int) (*models.Scene, error) { name := fmt.Sprintf("TestSceneQueryResolutionModifiers %d %d", width, height) - sceneFile := &file.VideoFile{ - BaseFile: &file.BaseFile{ + sceneFile := &models.VideoFile{ + BaseFile: &models.BaseFile{ Basename: name, ParentFolderID: folderIDs[folderIdxWithSceneFiles], }, @@ -3068,7 +3067,7 @@ func createScene(ctx context.Context, width int, height int) (*models.Scene, err scene := &models.Scene{} - if err := db.Scene.Create(ctx, scene, []file.ID{sceneFile.ID}); err != nil { + if err := db.Scene.Create(ctx, scene, []models.FileID{sceneFile.ID}); err != nil { return nil, err } @@ -4559,7 +4558,7 @@ func TestSceneStore_AssignFiles(t *testing.T) { tests := []struct { name string sceneID int - fileID file.ID + fileID models.FileID wantErr bool }{ { @@ -4587,7 +4586,7 @@ func TestSceneStore_AssignFiles(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { withRollbackTxn(func(ctx context.Context) error { - if err := qb.AssignFiles(ctx, tt.sceneID, []file.ID{tt.fileID}); (err != nil) != tt.wantErr { + if err := qb.AssignFiles(ctx, tt.sceneID, []models.FileID{tt.fileID}); (err != nil) != tt.wantErr { t.Errorf("SceneStore.AssignFiles() error = %v, wantErr %v", err, tt.wantErr) } diff --git a/pkg/sqlite/setup_test.go b/pkg/sqlite/setup_test.go index c57f272c7d4..737a28e7230 100644 --- a/pkg/sqlite/setup_test.go +++ b/pkg/sqlite/setup_test.go @@ -14,7 +14,6 @@ import ( "testing" "time" - 
"github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/sliceutil/intslice" "github.com/stashapp/stash/pkg/sqlite" @@ -283,11 +282,11 @@ const ( ) var ( - folderIDs []file.FolderID - fileIDs []file.ID - sceneFileIDs []file.ID - imageFileIDs []file.ID - galleryFileIDs []file.ID + folderIDs []models.FolderID + fileIDs []models.FileID + sceneFileIDs []models.FileID + imageFileIDs []models.FileID + galleryFileIDs []models.FileID chapterIDs []int sceneIDs []int @@ -700,8 +699,8 @@ func getFolderModTime(index int) time.Time { return time.Date(2000, 1, (index%10)+1, 0, 0, 0, 0, time.UTC) } -func makeFolder(i int) file.Folder { - var folderID *file.FolderID +func makeFolder(i int) models.Folder { + var folderID *models.FolderID var folderIdx *int if pidx, ok := folderParentFolders[i]; ok { folderIdx = &pidx @@ -709,9 +708,9 @@ func makeFolder(i int) file.Folder { folderID = &v } - return file.Folder{ + return models.Folder{ ParentFolderID: folderID, - DirEntry: file.DirEntry{ + DirEntry: models.DirEntry{ // zip files have to be added after creating files ModTime: getFolderModTime(i), }, @@ -748,8 +747,8 @@ func getFileModTime(index int) time.Time { return getFolderModTime(index) } -func getFileFingerprints(index int) []file.Fingerprint { - return []file.Fingerprint{ +func getFileFingerprints(index int) []models.Fingerprint { + return []models.Fingerprint{ { Type: "MD5", Fingerprint: getPrefixedStringValue("file", index, "md5"), @@ -772,22 +771,22 @@ func getFileDuration(index int) float64 { return float64(duration) + 0.432 } -func makeFile(i int) file.File { +func makeFile(i int) models.File { folderID := folderIDs[fileFolders[i]] if folderID == 0 { folderID = folderIDs[folderIdxWithFiles] } - var zipFileID *file.ID + var zipFileID *models.FileID if zipFileIndex, found := fileZipFiles[i]; found { zipFileID = &fileIDs[zipFileIndex] } - var ret file.File - baseFile := &file.BaseFile{ + var ret models.File + baseFile := 
&models.BaseFile{ Basename: getFileBaseName(i), ParentFolderID: folderID, - DirEntry: file.DirEntry{ + DirEntry: models.DirEntry{ // zip files have to be added after creating files ModTime: getFileModTime(i), ZipFileID: zipFileID, @@ -799,7 +798,7 @@ func makeFile(i int) file.File { ret = baseFile if i >= fileIdxStartVideoFiles && i < fileIdxStartImageFiles { - ret = &file.VideoFile{ + ret = &models.VideoFile{ BaseFile: baseFile, Format: getFileStringValue(i, "format"), Width: getWidth(i), @@ -811,7 +810,7 @@ func makeFile(i int) file.File { BitRate: int64(getFileDuration(i)) * 3, } } else if i >= fileIdxStartImageFiles && i < fileIdxStartGalleryFiles { - ret = &file.ImageFile{ + ret = &models.ImageFile{ BaseFile: baseFile, Format: getFileStringValue(i, "format"), Width: getWidth(i), @@ -977,27 +976,27 @@ func getSceneBasename(index int) string { return getSceneStringValue(index, pathField) } -func makeSceneFile(i int) *file.VideoFile { - fp := []file.Fingerprint{ +func makeSceneFile(i int) *models.VideoFile { + fp := []models.Fingerprint{ { - Type: file.FingerprintTypeMD5, + Type: models.FingerprintTypeMD5, Fingerprint: getSceneStringValue(i, checksumField), }, { - Type: file.FingerprintTypeOshash, + Type: models.FingerprintTypeOshash, Fingerprint: getSceneStringValue(i, "oshash"), }, } if i != sceneIdxMissingPhash { - fp = append(fp, file.Fingerprint{ - Type: file.FingerprintTypePhash, + fp = append(fp, models.Fingerprint{ + Type: models.FingerprintTypePhash, Fingerprint: getScenePhash(i, "phash"), }) } - return &file.VideoFile{ - BaseFile: &file.BaseFile{ + return &models.VideoFile{ + BaseFile: &models.BaseFile{ Path: getFilePath(folderIdxWithSceneFiles, getSceneBasename(i)), Basename: getSceneBasename(i), ParentFolderID: folderIDs[folderIdxWithSceneFiles], @@ -1100,7 +1099,7 @@ func createScenes(ctx context.Context, n int) error { scene := makeScene(i) - if err := sqb.Create(ctx, scene, []file.ID{f.ID}); err != nil { + if err := sqb.Create(ctx, scene, 
[]models.FileID{f.ID}); err != nil { return fmt.Errorf("Error creating scene %v+: %s", scene, err.Error()) } @@ -1118,15 +1117,15 @@ func getImageBasename(index int) string { return getImageStringValue(index, pathField) } -func makeImageFile(i int) *file.ImageFile { - return &file.ImageFile{ - BaseFile: &file.BaseFile{ +func makeImageFile(i int) *models.ImageFile { + return &models.ImageFile{ + BaseFile: &models.BaseFile{ Path: getFilePath(folderIdxWithImageFiles, getImageBasename(i)), Basename: getImageBasename(i), ParentFolderID: folderIDs[folderIdxWithImageFiles], - Fingerprints: []file.Fingerprint{ + Fingerprints: []models.Fingerprint{ { - Type: file.FingerprintTypeMD5, + Type: models.FingerprintTypeMD5, Fingerprint: getImageStringValue(i, checksumField), }, }, @@ -1180,7 +1179,7 @@ func createImages(ctx context.Context, n int) error { err := qb.Create(ctx, &models.ImageCreateInput{ Image: image, - FileIDs: []file.ID{f.ID}, + FileIDs: []models.FileID{f.ID}, }) if err != nil { @@ -1209,14 +1208,14 @@ func getGalleryBasename(index int) string { return getGalleryStringValue(index, pathField) } -func makeGalleryFile(i int) *file.BaseFile { - return &file.BaseFile{ +func makeGalleryFile(i int) *models.BaseFile { + return &models.BaseFile{ Path: getFilePath(folderIdxWithGalleryFiles, getGalleryBasename(i)), Basename: getGalleryBasename(i), ParentFolderID: folderIDs[folderIdxWithGalleryFiles], - Fingerprints: []file.Fingerprint{ + Fingerprints: []models.Fingerprint{ { - Type: file.FingerprintTypeMD5, + Type: models.FingerprintTypeMD5, Fingerprint: getGalleryStringValue(i, checksumField), }, }, @@ -1255,14 +1254,14 @@ func createGalleries(ctx context.Context, n int) error { fqb := db.File for i := 0; i < n; i++ { - var fileIDs []file.ID + var fileIDs []models.FileID if i != galleryIdxWithoutFile { f := makeGalleryFile(i) if err := fqb.Create(ctx, f); err != nil { return fmt.Errorf("creating gallery file: %w", err) } galleryFileIDs = append(galleryFileIDs, f.ID) - 
fileIDs = []file.ID{f.ID} + fileIDs = []models.FileID{f.ID} } else { galleryFileIDs = append(galleryFileIDs, 0) } @@ -1714,10 +1713,29 @@ func getSavedFilterName(index int) string { func createSavedFilters(ctx context.Context, qb models.SavedFilterReaderWriter, n int) error { for i := 0; i < n; i++ { + filterQ := "" + filterPage := i + filterPerPage := i * 40 + filterSort := "date" + filterDirection := models.SortDirectionEnumAsc + findFilter := models.FindFilterType{ + Q: &filterQ, + Page: &filterPage, + PerPage: &filterPerPage, + Sort: &filterSort, + Direction: &filterDirection, + } savedFilter := models.SavedFilter{ - Mode: getSavedFilterMode(i), - Name: getSavedFilterName(i), - Filter: getPrefixedStringValue("savedFilter", i, "Filter"), + Mode: getSavedFilterMode(i), + Name: getSavedFilterName(i), + FindFilter: &findFilter, + ObjectFilter: map[string]interface{}{ + "test": "object", + }, + UIOptions: map[string]interface{}{ + "display_mode": 1, + "zoom_index": 1, + }, } err := qb.Create(ctx, &savedFilter) diff --git a/pkg/sqlite/table.go b/pkg/sqlite/table.go index e3cedce37d1..510b5877cf8 100644 --- a/pkg/sqlite/table.go +++ b/pkg/sqlite/table.go @@ -11,7 +11,6 @@ import ( "github.com/jmoiron/sqlx" "gopkg.in/guregu/null.v4" - "github.com/stashapp/stash/pkg/file" "github.com/stashapp/stash/pkg/logger" "github.com/stashapp/stash/pkg/models" "github.com/stashapp/stash/pkg/sliceutil" @@ -707,12 +706,12 @@ type relatedFilesTable struct { } // type scenesFilesRow struct { -// SceneID int `db:"scene_id"` -// Primary bool `db:"primary"` -// FileID file.ID `db:"file_id"` +// SceneID int `db:"scene_id"` +// Primary bool `db:"primary"` +// FileID models.FileID `db:"file_id"` // } -func (t *relatedFilesTable) insertJoin(ctx context.Context, id int, primary bool, fileID file.ID) error { +func (t *relatedFilesTable) insertJoin(ctx context.Context, id int, primary bool, fileID models.FileID) error { q := dialect.Insert(t.table.table).Cols(t.idColumn.GetCol(), "primary", 
"file_id").Vals( goqu.Vals{id, primary, fileID}, ) @@ -724,7 +723,7 @@ func (t *relatedFilesTable) insertJoin(ctx context.Context, id int, primary bool return nil } -func (t *relatedFilesTable) insertJoins(ctx context.Context, id int, firstPrimary bool, fileIDs []file.ID) error { +func (t *relatedFilesTable) insertJoins(ctx context.Context, id int, firstPrimary bool, fileIDs []models.FileID) error { for i, fk := range fileIDs { if err := t.insertJoin(ctx, id, firstPrimary && i == 0, fk); err != nil { return err @@ -734,7 +733,7 @@ func (t *relatedFilesTable) insertJoins(ctx context.Context, id int, firstPrimar return nil } -func (t *relatedFilesTable) replaceJoins(ctx context.Context, id int, fileIDs []file.ID) error { +func (t *relatedFilesTable) replaceJoins(ctx context.Context, id int, fileIDs []models.FileID) error { if err := t.destroy(ctx, []int{id}); err != nil { return err } @@ -744,7 +743,7 @@ func (t *relatedFilesTable) replaceJoins(ctx context.Context, id int, fileIDs [] } // destroyJoins destroys all entries in the table with the provided fileIDs -func (t *relatedFilesTable) destroyJoins(ctx context.Context, fileIDs []file.ID) error { +func (t *relatedFilesTable) destroyJoins(ctx context.Context, fileIDs []models.FileID) error { q := dialect.Delete(t.table.table).Where(t.table.table.Col("file_id").In(fileIDs)) if _, err := exec(ctx, q); err != nil { @@ -754,7 +753,7 @@ func (t *relatedFilesTable) destroyJoins(ctx context.Context, fileIDs []file.ID) return nil } -func (t *relatedFilesTable) setPrimary(ctx context.Context, id int, fileID file.ID) error { +func (t *relatedFilesTable) setPrimary(ctx context.Context, id int, fileID models.FileID) error { table := t.table.table q := dialect.Update(table).Prepared(true).Set(goqu.Record{ diff --git a/pkg/sqlite/values.go b/pkg/sqlite/values.go index be812275f89..5e196051bd2 100644 --- a/pkg/sqlite/values.go +++ b/pkg/sqlite/values.go @@ -1,9 +1,9 @@ package sqlite import ( - "github.com/stashapp/stash/pkg/file" 
- "gopkg.in/guregu/null.v4" + + "github.com/stashapp/stash/pkg/models" ) // null package does not provide methods to convert null.Int to int pointer @@ -33,27 +33,27 @@ func nullFloatPtr(i null.Float) *float64 { return &v } -func nullIntFolderIDPtr(i null.Int) *file.FolderID { +func nullIntFolderIDPtr(i null.Int) *models.FolderID { if !i.Valid { return nil } - v := file.FolderID(i.Int64) + v := models.FolderID(i.Int64) return &v } -func nullIntFileIDPtr(i null.Int) *file.ID { +func nullIntFileIDPtr(i null.Int) *models.FileID { if !i.Valid { return nil } - v := file.ID(i.Int64) + v := models.FileID(i.Int64) return &v } -func nullIntFromFileIDPtr(i *file.ID) null.Int { +func nullIntFromFileIDPtr(i *models.FileID) null.Int { if i == nil { return null.NewInt(0, false) } @@ -61,7 +61,7 @@ func nullIntFromFileIDPtr(i *file.ID) null.Int { return null.IntFrom(int64(*i)) } -func nullIntFromFolderIDPtr(i *file.FolderID) null.Int { +func nullIntFromFolderIDPtr(i *models.FolderID) null.Int { if i == nil { return null.NewInt(0, false) } diff --git a/pkg/studio/export.go b/pkg/studio/export.go index 2ad158c17e1..9d6d79299b4 100644 --- a/pkg/studio/export.go +++ b/pkg/studio/export.go @@ -11,15 +11,15 @@ import ( "github.com/stashapp/stash/pkg/utils" ) -type FinderImageAliasStashIDGetter interface { - Finder - GetImage(ctx context.Context, studioID int) ([]byte, error) +type FinderImageStashIDGetter interface { + models.StudioGetter models.AliasLoader models.StashIDLoader + GetImage(ctx context.Context, studioID int) ([]byte, error) } // ToJSON converts a Studio object into its JSON equivalent. 
-func ToJSON(ctx context.Context, reader FinderImageAliasStashIDGetter, studio *models.Studio) (*jsonschema.Studio, error) { +func ToJSON(ctx context.Context, reader FinderImageStashIDGetter, studio *models.Studio) (*jsonschema.Studio, error) { newStudioJSON := jsonschema.Studio{ Name: studio.Name, URL: studio.URL, diff --git a/pkg/studio/import.go b/pkg/studio/import.go index 653dfce611f..df712daab79 100644 --- a/pkg/studio/import.go +++ b/pkg/studio/import.go @@ -10,16 +10,15 @@ import ( "github.com/stashapp/stash/pkg/utils" ) -type NameFinderCreatorUpdater interface { - NameFinderCreator - Update(ctx context.Context, updatedStudio *models.Studio) error - UpdateImage(ctx context.Context, studioID int, image []byte) error +type ImporterReaderWriter interface { + models.StudioCreatorUpdater + FindByName(ctx context.Context, name string, nocase bool) (*models.Studio, error) } var ErrParentStudioNotExist = errors.New("parent studio does not exist") type Importer struct { - ReaderWriter NameFinderCreatorUpdater + ReaderWriter ImporterReaderWriter Input jsonschema.Studio MissingRefBehaviour models.ImportMissingRefEnum diff --git a/pkg/studio/query.go b/pkg/studio/query.go index ce3594eb17b..b20cec33109 100644 --- a/pkg/studio/query.go +++ b/pkg/studio/query.go @@ -6,21 +6,7 @@ import ( "github.com/stashapp/stash/pkg/models" ) -type Finder interface { - Find(ctx context.Context, id int) (*models.Studio, error) -} - -type Queryer interface { - Query(ctx context.Context, studioFilter *models.StudioFilterType, findFilter *models.FindFilterType) ([]*models.Studio, int, error) -} - -type FinderQueryer interface { - Finder - Queryer - models.AliasLoader -} - -func ByName(ctx context.Context, qb Queryer, name string) (*models.Studio, error) { +func ByName(ctx context.Context, qb models.StudioQueryer, name string) (*models.Studio, error) { f := &models.StudioFilterType{ Name: &models.StringCriterionInput{ Value: name, @@ -44,7 +30,7 @@ func ByName(ctx context.Context, qb 
Queryer, name string) (*models.Studio, error return nil, nil } -func ByAlias(ctx context.Context, qb Queryer, alias string) (*models.Studio, error) { +func ByAlias(ctx context.Context, qb models.StudioQueryer, alias string) (*models.Studio, error) { f := &models.StudioFilterType{ Aliases: &models.StringCriterionInput{ Value: alias, diff --git a/pkg/studio/update.go b/pkg/studio/update.go index 0b159edcd12..a1a16a0c491 100644 --- a/pkg/studio/update.go +++ b/pkg/studio/update.go @@ -12,11 +12,6 @@ var ( ErrStudioOwnAncestor = errors.New("studio cannot be an ancestor of itself") ) -type NameFinderCreator interface { - FindByName(ctx context.Context, name string, nocase bool) (*models.Studio, error) - Create(ctx context.Context, newStudio *models.Studio) error -} - type NameExistsError struct { Name string } @@ -36,7 +31,7 @@ func (e *NameUsedByAliasError) Error() string { // EnsureStudioNameUnique returns an error if the studio name provided // is used as a name or alias of another existing tag. -func EnsureStudioNameUnique(ctx context.Context, id int, name string, qb Queryer) error { +func EnsureStudioNameUnique(ctx context.Context, id int, name string, qb models.StudioQueryer) error { // ensure name is unique sameNameStudio, err := ByName(ctx, qb, name) if err != nil { @@ -65,7 +60,7 @@ func EnsureStudioNameUnique(ctx context.Context, id int, name string, qb Queryer return nil } -func EnsureAliasesUnique(ctx context.Context, id int, aliases []string, qb Queryer) error { +func EnsureAliasesUnique(ctx context.Context, id int, aliases []string, qb models.StudioQueryer) error { for _, a := range aliases { if err := EnsureStudioNameUnique(ctx, id, a, qb); err != nil { return err @@ -75,11 +70,17 @@ func EnsureAliasesUnique(ctx context.Context, id int, aliases []string, qb Query return nil } +type ValidateModifyReader interface { + models.StudioGetter + models.StudioQueryer + models.AliasLoader +} + // Checks to make sure that: // 1. The studio exists locally // 2. 
The studio is not its own ancestor // 3. The studio's aliases are unique -func ValidateModify(ctx context.Context, s models.StudioPartial, qb FinderQueryer) error { +func ValidateModify(ctx context.Context, s models.StudioPartial, qb ValidateModifyReader) error { existing, err := qb.Find(ctx, s.ID) if err != nil { return err @@ -110,7 +111,7 @@ func ValidateModify(ctx context.Context, s models.StudioPartial, qb FinderQuerye return nil } -func validateParent(ctx context.Context, studioID int, newParentID int, qb FinderQueryer) error { +func validateParent(ctx context.Context, studioID int, newParentID int, qb models.StudioGetter) error { if newParentID == studioID { return ErrStudioOwnAncestor } diff --git a/pkg/tag/import.go b/pkg/tag/import.go index 67bdbc460ca..368815bbe44 100644 --- a/pkg/tag/import.go +++ b/pkg/tag/import.go @@ -9,13 +9,9 @@ import ( "github.com/stashapp/stash/pkg/utils" ) -type NameFinderCreatorUpdater interface { +type ImporterReaderWriter interface { + models.TagCreatorUpdater FindByName(ctx context.Context, name string, nocase bool) (*models.Tag, error) - Create(ctx context.Context, newTag *models.Tag) error - Update(ctx context.Context, updatedTag *models.Tag) error - UpdateImage(ctx context.Context, tagID int, image []byte) error - UpdateAliases(ctx context.Context, tagID int, aliases []string) error - UpdateParentTags(ctx context.Context, tagID int, parentIDs []int) error } type ParentTagNotExistError struct { @@ -31,7 +27,7 @@ func (e ParentTagNotExistError) MissingParent() string { } type Importer struct { - ReaderWriter NameFinderCreatorUpdater + ReaderWriter ImporterReaderWriter Input jsonschema.Tag MissingRefBehaviour models.ImportMissingRefEnum diff --git a/pkg/tag/query.go b/pkg/tag/query.go index a048054d763..76567434d1a 100644 --- a/pkg/tag/query.go +++ b/pkg/tag/query.go @@ -6,15 +6,7 @@ import ( "github.com/stashapp/stash/pkg/models" ) -type Finder interface { - Find(ctx context.Context, id int) (*models.Tag, error) -} - -type 
Queryer interface { - Query(ctx context.Context, tagFilter *models.TagFilterType, findFilter *models.FindFilterType) ([]*models.Tag, int, error) -} - -func ByName(ctx context.Context, qb Queryer, name string) (*models.Tag, error) { +func ByName(ctx context.Context, qb models.TagQueryer, name string) (*models.Tag, error) { f := &models.TagFilterType{ Name: &models.StringCriterionInput{ Value: name, @@ -38,7 +30,7 @@ func ByName(ctx context.Context, qb Queryer, name string) (*models.Tag, error) { return nil, nil } -func ByAlias(ctx context.Context, qb Queryer, alias string) (*models.Tag, error) { +func ByAlias(ctx context.Context, qb models.TagQueryer, alias string) (*models.Tag, error) { f := &models.TagFilterType{ Aliases: &models.StringCriterionInput{ Value: alias, diff --git a/pkg/tag/update.go b/pkg/tag/update.go index 3b0dbd4141e..dcb78bf9cab 100644 --- a/pkg/tag/update.go +++ b/pkg/tag/update.go @@ -7,11 +7,6 @@ import ( "github.com/stashapp/stash/pkg/models" ) -type NameFinderCreator interface { - FindByNames(ctx context.Context, names []string, nocase bool) ([]*models.Tag, error) - Create(ctx context.Context, newTag *models.Tag) error -} - type NameExistsError struct { Name string } @@ -43,7 +38,7 @@ func (e *InvalidTagHierarchyError) Error() string { // EnsureTagNameUnique returns an error if the tag name provided // is used as a name or alias of another existing tag. 
-func EnsureTagNameUnique(ctx context.Context, id int, name string, qb Queryer) error { +func EnsureTagNameUnique(ctx context.Context, id int, name string, qb models.TagQueryer) error { // ensure name is unique sameNameTag, err := ByName(ctx, qb, name) if err != nil { @@ -72,7 +67,7 @@ func EnsureTagNameUnique(ctx context.Context, id int, name string, qb Queryer) e return nil } -func EnsureAliasesUnique(ctx context.Context, id int, aliases []string, qb Queryer) error { +func EnsureAliasesUnique(ctx context.Context, id int, aliases []string, qb models.TagQueryer) error { for _, a := range aliases { if err := EnsureTagNameUnique(ctx, id, a, qb); err != nil { return err @@ -82,14 +77,14 @@ func EnsureAliasesUnique(ctx context.Context, id int, aliases []string, qb Query return nil } -type RelationshipGetter interface { +type RelationshipFinder interface { FindAllAncestors(ctx context.Context, tagID int, excludeIDs []int) ([]*models.TagPath, error) FindAllDescendants(ctx context.Context, tagID int, excludeIDs []int) ([]*models.TagPath, error) FindByChildTagID(ctx context.Context, childID int) ([]*models.Tag, error) FindByParentTagID(ctx context.Context, parentID int) ([]*models.Tag, error) } -func ValidateHierarchy(ctx context.Context, tag *models.Tag, parentIDs, childIDs []int, qb RelationshipGetter) error { +func ValidateHierarchy(ctx context.Context, tag *models.Tag, parentIDs, childIDs []int, qb RelationshipFinder) error { id := tag.ID allAncestors := make(map[int]*models.TagPath) allDescendants := make(map[int]*models.TagPath) @@ -177,7 +172,7 @@ func ValidateHierarchy(ctx context.Context, tag *models.Tag, parentIDs, childIDs return nil } -func MergeHierarchy(ctx context.Context, destination int, sources []int, qb RelationshipGetter) ([]int, []int, error) { +func MergeHierarchy(ctx context.Context, destination int, sources []int, qb RelationshipFinder) ([]int, []int, error) { var mergedParents, mergedChildren []int allIds := append([]int{destination}, sources...) 
diff --git a/ui/v2.5/.eslintrc.json b/ui/v2.5/.eslintrc.json index f37f8028ca6..edce4355156 100644 --- a/ui/v2.5/.eslintrc.json +++ b/ui/v2.5/.eslintrc.json @@ -74,7 +74,7 @@ "prefer-destructuring": ["error", { "object": true, "array": false }], "@typescript-eslint/no-use-before-define": [ "error", - { "functions": false, "classes": true } + { "functions": false, "classes": false } ], "no-nested-ternary": "off" } diff --git a/ui/v2.5/package.json b/ui/v2.5/package.json index 60b2d35f477..24039dfbad8 100644 --- a/ui/v2.5/package.json +++ b/ui/v2.5/package.json @@ -32,7 +32,6 @@ "@silvermine/videojs-airplay": "^1.2.0", "@silvermine/videojs-chromecast": "^1.4.1", "apollo-upload-client": "^17.0.0", - "axios": "^1.3.3", "base64-blob": "^1.4.1", "bootstrap": "^4.6.2", "classnames": "^2.3.2", @@ -69,6 +68,7 @@ "ua-parser-js": "^1.0.34", "universal-cookie": "^4.0.4", "video.js": "^7.21.3", + "videojs-abloop": "^1.2.0", "videojs-contrib-dash": "^5.1.1", "videojs-mobile-ui": "^0.8.0", "videojs-seek-buttons": "^3.0.1", diff --git a/ui/v2.5/src/@types/videojs-abloop.d.ts b/ui/v2.5/src/@types/videojs-abloop.d.ts new file mode 100644 index 00000000000..b44d9f50c6e --- /dev/null +++ b/ui/v2.5/src/@types/videojs-abloop.d.ts @@ -0,0 +1,35 @@ +/* eslint-disable @typescript-eslint/naming-convention */ + +declare module "videojs-abloop" { + import videojs from "video.js"; + + declare function abLoopPlugin( + window: Window & typeof globalThis, + player: videojs + ): abLoopPlugin.Plugin; + + declare namespace abLoopPlugin { + interface Options { + start: number | boolean; + end: number | boolean; + enabled: boolean; + loopIfBeforeStart: boolean; + loopIfAfterEnd: boolean; + pauseBeforeLooping: boolean; + pauseAfterLooping: boolean; + } + + class Plugin extends videojs.Plugin { + getOptions(): Options; + setOptions(o: Options): void; + } + } + + export = abLoopPlugin; + + declare module "video.js" { + interface VideoJsPlayer { + abLoopPlugin: abLoopPlugin.Plugin; + } + } +} diff --git 
a/ui/v2.5/src/components/FrontPage/Control.tsx b/ui/v2.5/src/components/FrontPage/Control.tsx index c655d9c3eff..3cb2cf02111 100644 --- a/ui/v2.5/src/components/FrontPage/Control.tsx +++ b/ui/v2.5/src/components/FrontPage/Control.tsx @@ -105,11 +105,11 @@ const SavedFilterResults: React.FC = ({ const filter = useMemo(() => { if (!data?.findSavedFilter) return; - const { mode, filter: filterJSON } = data.findSavedFilter; + const { mode } = data.findSavedFilter; const ret = new ListFilterModel(mode, config); ret.currentPage = 1; - ret.configureFromJSON(filterJSON); + ret.configureFromSavedFilter(data.findSavedFilter); ret.randomSeed = -1; return ret; }, [data?.findSavedFilter, config]); diff --git a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryEditPanel.tsx b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryEditPanel.tsx index c0d037661f3..1701b5bc7e1 100644 --- a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryEditPanel.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryEditPanel.tsx @@ -19,7 +19,6 @@ import { mutateReloadScrapers, } from "src/core/StashService"; import { - PerformerSelect, TagSelect, SceneSelect, StudioSelect, @@ -39,6 +38,10 @@ import { ConfigurationContext } from "src/hooks/Config"; import isEqual from "lodash-es/isEqual"; import { DateInput } from "src/components/Shared/DateInput"; import { handleUnsavedChanges } from "src/utils/navigation"; +import { + Performer, + PerformerSelect, +} from "src/components/Performers/PerformerSelect"; interface IProps { gallery: Partial; @@ -62,6 +65,8 @@ export const GalleryEditPanel: React.FC = ({ })) ); + const [performers, setPerformers] = useState([]); + const isNew = gallery.id === undefined; const { configuration: stashConfig } = React.useContext(ConfigurationContext); @@ -139,12 +144,24 @@ export const GalleryEditPanel: React.FC = ({ ); } + function onSetPerformers(items: Performer[]) { + setPerformers(items); + formik.setFieldValue( + "performer_ids", + 
items.map((item) => item.id) + ); + } + useRatingKeybinds( isVisible, stashConfig?.ui?.ratingSystemOptions?.type, setRating ); + useEffect(() => { + setPerformers(gallery.performers ?? []); + }, [gallery.performers]); + useEffect(() => { if (isVisible) { Mousetrap.bind("s s", () => { @@ -238,6 +255,7 @@ export const GalleryEditPanel: React.FC = ({ return ( { onScrapeDialogClosed(data); @@ -309,8 +327,15 @@ export const GalleryEditPanel: React.FC = ({ }); if (idPerfs.length > 0) { - const newIds = idPerfs.map((p) => p.stored_id); - formik.setFieldValue("performer_ids", newIds as string[]); + onSetPerformers( + idPerfs.map((p) => { + return { + id: p.stored_id!, + name: p.name ?? "", + alias_list: [], + }; + }) + ); } } @@ -472,13 +497,8 @@ export const GalleryEditPanel: React.FC = ({ - formik.setFieldValue( - "performer_ids", - items.map((item) => item.id) - ) - } - ids={formik.values.performer_ids} + onSelect={onSetPerformers} + values={performers} /> diff --git a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryScrapeDialog.tsx b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryScrapeDialog.tsx index 520f5bf4746..9d626bcb448 100644 --- a/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryScrapeDialog.tsx +++ b/ui/v2.5/src/components/Galleries/GalleryDetails/GalleryScrapeDialog.tsx @@ -1,180 +1,29 @@ import React, { useState } from "react"; -import { FormattedMessage, useIntl } from "react-intl"; -import { - StudioSelect, - PerformerSelect, - TagSelect, -} from "src/components/Shared/Select"; +import { useIntl } from "react-intl"; import * as GQL from "src/core/generated-graphql"; import { ScrapeDialog, - ScrapeDialogRow, - ScrapeResult, ScrapedInputGroupRow, ScrapedTextAreaRow, -} from "src/components/Shared/ScrapeDialog"; +} from "src/components/Shared/ScrapeDialog/ScrapeDialog"; import clone from "lodash-es/clone"; +import { ScrapeResult } from "src/components/Shared/ScrapeDialog/scrapeResult"; import { - useStudioCreate, - usePerformerCreate, - 
useTagCreate, -} from "src/core/StashService"; -import { useToast } from "src/hooks/Toast"; -import { scrapedPerformerToCreateInput } from "src/core/performers"; - -function renderScrapedStudio( - result: ScrapeResult, - isNew?: boolean, - onChange?: (value: string) => void -) { - const resultValue = isNew ? result.newValue : result.originalValue; - const value = resultValue ? [resultValue] : []; - - return ( - { - if (onChange) { - onChange(items[0]?.id); - } - }} - ids={value} - /> - ); -} - -function renderScrapedStudioRow( - title: string, - result: ScrapeResult, - onChange: (value: ScrapeResult) => void, - newStudio?: GQL.ScrapedStudio, - onCreateNew?: (value: GQL.ScrapedStudio) => void -) { - return ( - renderScrapedStudio(result)} - renderNewField={() => - renderScrapedStudio(result, true, (value) => - onChange(result.cloneWithValue(value)) - ) - } - onChange={onChange} - newValues={newStudio ? [newStudio] : undefined} - onCreateNew={() => { - if (onCreateNew && newStudio) onCreateNew(newStudio); - }} - /> - ); -} - -function renderScrapedPerformers( - result: ScrapeResult, - isNew?: boolean, - onChange?: (value: string[]) => void -) { - const resultValue = isNew ? result.newValue : result.originalValue; - const value = resultValue ?? []; - - return ( - { - if (onChange) { - onChange(items.map((i) => i.id)); - } - }} - ids={value} - /> - ); -} - -function renderScrapedPerformersRow( - title: string, - result: ScrapeResult, - onChange: (value: ScrapeResult) => void, - newPerformers: GQL.ScrapedPerformer[], - onCreateNew?: (value: GQL.ScrapedPerformer) => void -) { - const performersCopy = newPerformers.map((p) => { - const name: string = p.name ?? 
""; - return { ...p, name }; - }); - - return ( - renderScrapedPerformers(result)} - renderNewField={() => - renderScrapedPerformers(result, true, (value) => - onChange(result.cloneWithValue(value)) - ) - } - onChange={onChange} - newValues={performersCopy} - onCreateNew={(i) => { - if (onCreateNew) onCreateNew(newPerformers[i]); - }} - /> - ); -} - -function renderScrapedTags( - result: ScrapeResult, - isNew?: boolean, - onChange?: (value: string[]) => void -) { - const resultValue = isNew ? result.newValue : result.originalValue; - const value = resultValue ?? []; - - return ( - { - if (onChange) { - onChange(items.map((i) => i.id)); - } - }} - ids={value} - /> - ); -} - -function renderScrapedTagsRow( - title: string, - result: ScrapeResult, - onChange: (value: ScrapeResult) => void, - newTags: GQL.ScrapedTag[], - onCreateNew?: (value: GQL.ScrapedTag) => void -) { - return ( - renderScrapedTags(result)} - renderNewField={() => - renderScrapedTags(result, true, (value) => - onChange(result.cloneWithValue(value)) - ) - } - newValues={newTags} - onChange={onChange} - onCreateNew={(i) => { - if (onCreateNew) onCreateNew(newTags[i]); - }} - /> - ); -} + ScrapedPerformersRow, + ScrapedStudioRow, + ScrapedTagsRow, +} from "src/components/Shared/ScrapeDialog/ScrapedObjectsRow"; +import { sortStoredIdObjects } from "src/utils/data"; +import { Performer } from "src/components/Performers/PerformerSelect"; +import { + useCreateScrapedPerformer, + useCreateScrapedStudio, + useCreateScrapedTag, +} from "src/components/Shared/ScrapeDialog/createObjects"; interface IGalleryScrapeDialogProps { gallery: Partial; + galleryPerformers: Performer[]; scraped: GQL.ScrapedGallery; onClose: (scrapedGallery?: GQL.ScrapedGallery) => void; @@ -247,10 +96,17 @@ export const GalleryScrapeDialog: React.FC = ( return ret; } - const [performers, setPerformers] = useState>( - new ScrapeResult( - sortIdList(props.gallery.performer_ids), - mapStoredIdObjects(props.scraped.performers ?? 
undefined) + const [performers, setPerformers] = useState< + ScrapeResult + >( + new ScrapeResult( + sortStoredIdObjects( + props.galleryPerformers.map((p) => ({ + stored_id: p.id, + name: p.name, + })) + ), + sortStoredIdObjects(props.scraped.performers ?? undefined) ) ); const [newPerformers, setNewPerformers] = useState( @@ -271,11 +127,25 @@ export const GalleryScrapeDialog: React.FC = ( new ScrapeResult(props.gallery.details, props.scraped.details) ); - const [createStudio] = useStudioCreate(); - const [createPerformer] = usePerformerCreate(); - const [createTag] = useTagCreate(); + const createNewStudio = useCreateScrapedStudio({ + scrapeResult: studio, + setScrapeResult: setStudio, + setNewObject: setNewStudio, + }); - const Toast = useToast(); + const createNewPerformer = useCreateScrapedPerformer({ + scrapeResult: performers, + setScrapeResult: setPerformers, + newObjects: newPerformers, + setNewObjects: setNewPerformers, + }); + + const createNewTag = useCreateScrapedTag({ + scrapeResult: tags, + setScrapeResult: setTags, + newObjects: newTags, + setNewObjects: setNewTags, + }); // don't show the dialog if nothing was scraped if ( @@ -290,122 +160,6 @@ export const GalleryScrapeDialog: React.FC = ( return <>; } - async function createNewStudio(toCreate: GQL.ScrapedStudio) { - try { - const result = await createStudio({ - variables: { - input: { - name: toCreate.name, - url: toCreate.url, - }, - }, - }); - - // set the new studio as the value - setStudio(studio.cloneWithValue(result.data!.studioCreate!.id)); - setNewStudio(undefined); - - Toast.success({ - content: ( - - {toCreate.name}, - }} - /> - - ), - }); - } catch (e) { - Toast.error(e); - } - } - - async function createNewPerformer(toCreate: GQL.ScrapedPerformer) { - const input = scrapedPerformerToCreateInput(toCreate); - - try { - const result = await createPerformer({ - variables: { input }, - }); - - // add the new performer to the new performers value - const performerClone = 
performers.cloneWithValue(performers.newValue); - if (!performerClone.newValue) { - performerClone.newValue = []; - } - performerClone.newValue.push(result.data!.performerCreate!.id); - setPerformers(performerClone); - - // remove the performer from the list - const newPerformersClone = newPerformers.concat(); - const pIndex = newPerformersClone.indexOf(toCreate); - newPerformersClone.splice(pIndex, 1); - - setNewPerformers(newPerformersClone); - - Toast.success({ - content: ( - - {toCreate.name}, - }} - /> - - ), - }); - } catch (e) { - Toast.error(e); - } - } - - async function createNewTag(toCreate: GQL.ScrapedTag) { - const tagInput: GQL.TagCreateInput = { name: toCreate.name ?? "" }; - try { - const result = await createTag({ - variables: { - input: tagInput, - }, - }); - - // add the new tag to the new tags value - const tagClone = tags.cloneWithValue(tags.newValue); - if (!tagClone.newValue) { - tagClone.newValue = []; - } - tagClone.newValue.push(result.data!.tagCreate!.id); - setTags(tagClone); - - // remove the tag from the list - const newTagsClone = newTags.concat(); - const pIndex = newTagsClone.indexOf(toCreate); - newTagsClone.splice(pIndex, 1); - - setNewTags(newTagsClone); - - Toast.success({ - content: ( - - {toCreate.name}, - }} - /> - - ), - }); - } catch (e) { - Toast.error(e); - } - } - function makeNewScrapedItem(): GQL.ScrapedGalleryDataFragment { const newStudioValue = studio.getNewValue(); @@ -419,12 +173,7 @@ export const GalleryScrapeDialog: React.FC = ( name: "", } : undefined, - performers: performers.getNewValue()?.map((p) => { - return { - stored_id: p, - name: "", - }; - }), + performers: performers.getNewValue(), tags: tags.getNewValue()?.map((m) => { return { stored_id: m, @@ -454,27 +203,27 @@ export const GalleryScrapeDialog: React.FC = ( result={date} onChange={(value) => setDate(value)} /> - {renderScrapedStudioRow( - intl.formatMessage({ id: "studios" }), - studio, - (value) => setStudio(value), - newStudio, - createNewStudio 
- )} - {renderScrapedPerformersRow( - intl.formatMessage({ id: "performers" }), - performers, - (value) => setPerformers(value), - newPerformers, - createNewPerformer - )} - {renderScrapedTagsRow( - intl.formatMessage({ id: "tags" }), - tags, - (value) => setTags(value), - newTags, - createNewTag - )} + setStudio(value)} + newStudio={newStudio} + onCreateNew={createNewStudio} + /> + setPerformers(value)} + newObjects={newPerformers} + onCreateNew={createNewPerformer} + /> + setTags(value)} + newObjects={newTags} + onCreateNew={createNewTag} + /> = ({ const { configuration } = React.useContext(ConfigurationContext); + const [performers, setPerformers] = useState([]); + const schema = yup.object({ title: yup.string().ensure(), url: yup.string().ensure(), @@ -87,12 +89,24 @@ export const ImageEditPanel: React.FC = ({ formik.setFieldValue("rating100", v); } + function onSetPerformers(items: Performer[]) { + setPerformers(items); + formik.setFieldValue( + "performer_ids", + items.map((item) => item.id) + ); + } + useRatingKeybinds( true, configuration?.ui?.ratingSystemOptions?.type, setRating ); + useEffect(() => { + setPerformers(image.performers ?? 
[]); + }, [image.performers]); + useEffect(() => { if (isVisible) { Mousetrap.bind("s s", () => { @@ -249,13 +263,8 @@ export const ImageEditPanel: React.FC = ({ - formik.setFieldValue( - "performer_ids", - items.map((item) => item.id) - ) - } - ids={formik.values.performer_ids} + onSelect={onSetPerformers} + values={performers} /> diff --git a/ui/v2.5/src/components/List/EditFilterDialog.tsx b/ui/v2.5/src/components/List/EditFilterDialog.tsx index 581fd31fb87..7ddb7fbdb5c 100644 --- a/ui/v2.5/src/components/List/EditFilterDialog.tsx +++ b/ui/v2.5/src/components/List/EditFilterDialog.tsx @@ -270,11 +270,11 @@ export const EditFilterDialog: React.FC = ({ if (existing) { setCriterion(existing); } else { - const newCriterion = makeCriteria(configuration, option.type); + const newCriterion = makeCriteria(filter.mode, option.type); setCriterion(newCriterion); } }, - [criteria, configuration] + [filter.mode, criteria] ); const ui = (configuration?.ui ?? {}) as IUIConfig; diff --git a/ui/v2.5/src/components/List/Filters/HierarchicalLabelValueFilter.tsx b/ui/v2.5/src/components/List/Filters/HierarchicalLabelValueFilter.tsx index bb262583881..abfb74ee78e 100644 --- a/ui/v2.5/src/components/List/Filters/HierarchicalLabelValueFilter.tsx +++ b/ui/v2.5/src/components/List/Filters/HierarchicalLabelValueFilter.tsx @@ -13,20 +13,19 @@ interface IHierarchicalLabelValueFilterProps { export const HierarchicalLabelValueFilter: React.FC< IHierarchicalLabelValueFilterProps > = ({ criterion, onValueChanged }) => { + const { criterionOption } = criterion; + const { type, inputType } = criterionOption; + const intl = useIntl(); if ( - criterion.criterionOption.type !== "performers" && - criterion.criterionOption.type !== "studios" && - criterion.criterionOption.type !== "parent_studios" && - criterion.criterionOption.type !== "tags" && - criterion.criterionOption.type !== "sceneTags" && - criterion.criterionOption.type !== "performerTags" && - criterion.criterionOption.type !== 
"parentTags" && - criterion.criterionOption.type !== "childTags" && - criterion.criterionOption.type !== "movies" - ) + inputType !== "studios" && + inputType !== "tags" && + inputType !== "scene_tags" && + inputType !== "performer_tags" + ) { return null; + } const messages = defineMessages({ studio_depth: { @@ -51,10 +50,10 @@ export const HierarchicalLabelValueFilter: React.FC< } function criterionOptionTypeToIncludeID(): string { - if (criterion.criterionOption.type === "studios") { + if (inputType === "studios") { return "include-sub-studios"; } - if (criterion.criterionOption.type === "childTags") { + if (type === "children") { return "include-parent-tags"; } return "include-sub-tags"; @@ -62,9 +61,9 @@ export const HierarchicalLabelValueFilter: React.FC< function criterionOptionTypeToIncludeUIString(): MessageDescriptor { const optionType = - criterion.criterionOption.type === "studios" + inputType === "studios" ? "include_sub_studios" - : criterion.criterionOption.type === "childTags" + : type === "children" ? 
"include_parent_tags" : "include_sub_tags"; return { @@ -76,7 +75,7 @@ export const HierarchicalLabelValueFilter: React.FC< <> labeled.id)} diff --git a/ui/v2.5/src/components/List/Filters/LabeledIdFilter.tsx b/ui/v2.5/src/components/List/Filters/LabeledIdFilter.tsx index f06e5c21bdc..13824e08b8b 100644 --- a/ui/v2.5/src/components/List/Filters/LabeledIdFilter.tsx +++ b/ui/v2.5/src/components/List/Filters/LabeledIdFilter.tsx @@ -13,18 +13,19 @@ export const LabeledIdFilter: React.FC = ({ criterion, onValueChanged, }) => { + const { criterionOption } = criterion; + const { inputType } = criterionOption; + if ( - criterion.criterionOption.type !== "performers" && - criterion.criterionOption.type !== "studios" && - criterion.criterionOption.type !== "parent_studios" && - criterion.criterionOption.type !== "tags" && - criterion.criterionOption.type !== "sceneTags" && - criterion.criterionOption.type !== "performerTags" && - criterion.criterionOption.type !== "parentTags" && - criterion.criterionOption.type !== "childTags" && - criterion.criterionOption.type !== "movies" - ) + inputType !== "performers" && + inputType !== "studios" && + inputType !== "scene_tags" && + inputType !== "performer_tags" && + inputType !== "tags" && + inputType !== "movies" + ) { return null; + } function onSelectionChanged(items: SelectObject[]) { onValueChanged( @@ -38,7 +39,7 @@ export const LabeledIdFilter: React.FC = ({ return ( labeled.id)} diff --git a/ui/v2.5/src/components/List/Filters/SelectableFilter.tsx b/ui/v2.5/src/components/List/Filters/SelectableFilter.tsx index 2c13eb57e81..08f5a251462 100644 --- a/ui/v2.5/src/components/List/Filters/SelectableFilter.tsx +++ b/ui/v2.5/src/components/List/Filters/SelectableFilter.tsx @@ -320,7 +320,7 @@ export const HierarchicalObjectsFilter = < if (criterion.criterionOption.type === "studios") { return "include-sub-studios"; } - if (criterion.criterionOption.type === "childTags") { + if (criterion.criterionOption.type === "children") { 
return "include-parent-tags"; } return "include-sub-tags"; @@ -330,7 +330,7 @@ export const HierarchicalObjectsFilter = < const optionType = criterion.criterionOption.type === "studios" ? "include_sub_studios" - : criterion.criterionOption.type === "childTags" + : criterion.criterionOption.type === "children" ? "include_parent_tags" : "include_sub_tags"; return { diff --git a/ui/v2.5/src/components/List/ItemList.tsx b/ui/v2.5/src/components/List/ItemList.tsx index e8cabe7cec1..8b3aa5898b1 100644 --- a/ui/v2.5/src/components/List/ItemList.tsx +++ b/ui/v2.5/src/components/List/ItemList.tsx @@ -619,8 +619,8 @@ export function makeItemList({ if (defaultFilter?.findDefaultFilter) { newFilter.currentPage = 1; try { - newFilter.configureFromJSON( - defaultFilter.findDefaultFilter.filter + newFilter.configureFromSavedFilter( + defaultFilter.findDefaultFilter ); } catch (err) { console.log(err); diff --git a/ui/v2.5/src/components/List/SavedFilterList.tsx b/ui/v2.5/src/components/List/SavedFilterList.tsx index 8a5da04735e..caa1277d6b2 100644 --- a/ui/v2.5/src/components/List/SavedFilterList.tsx +++ b/ui/v2.5/src/components/List/SavedFilterList.tsx @@ -75,7 +75,9 @@ export const SavedFilterList: React.FC = ({ id, mode: filter.mode, name, - filter: filterCopy.makeSavedFilterJSON(), + find_filter: filterCopy.makeFindFilter(), + object_filter: filterCopy.makeSavedFindFilter(), + ui_options: filterCopy.makeUIOptions(), }, }, }); @@ -143,7 +145,9 @@ export const SavedFilterList: React.FC = ({ variables: { input: { mode: filter.mode, - filter: filterCopy.makeSavedFilterJSON(), + find_filter: filterCopy.makeFindFilter(), + object_filter: filterCopy.makeSavedFindFilter(), + ui_options: filterCopy.makeUIOptions(), }, }, }); @@ -166,7 +170,7 @@ export const SavedFilterList: React.FC = ({ newFilter.currentPage = 1; // #1795 - reset search term if not present in saved filter newFilter.searchTerm = ""; - newFilter.configureFromJSON(f.filter); + newFilter.configureFromSavedFilter(f); // 
#1507 - reset random seed when loaded newFilter.randomSeed = -1; diff --git a/ui/v2.5/src/components/Movies/MovieDetails/MovieScrapeDialog.tsx b/ui/v2.5/src/components/Movies/MovieDetails/MovieScrapeDialog.tsx index 37ccec8f637..f11edf6e8c3 100644 --- a/ui/v2.5/src/components/Movies/MovieDetails/MovieScrapeDialog.tsx +++ b/ui/v2.5/src/components/Movies/MovieDetails/MovieScrapeDialog.tsx @@ -3,16 +3,16 @@ import { useIntl } from "react-intl"; import * as GQL from "src/core/generated-graphql"; import { ScrapeDialog, - ScrapeResult, ScrapedInputGroupRow, ScrapedImageRow, ScrapeDialogRow, ScrapedTextAreaRow, -} from "src/components/Shared/ScrapeDialog"; +} from "src/components/Shared/ScrapeDialog/ScrapeDialog"; import { StudioSelect } from "src/components/Shared/Select"; import DurationUtils from "src/utils/duration"; import { useStudioCreate } from "src/core/StashService"; import { useToast } from "src/hooks/Toast"; +import { ScrapeResult } from "src/components/Shared/ScrapeDialog/scrapeResult"; function renderScrapedStudio( result: ScrapeResult, diff --git a/ui/v2.5/src/components/Performers/PerformerDetails/Performer.tsx b/ui/v2.5/src/components/Performers/PerformerDetails/Performer.tsx index 015789fe173..8d42a6c2a22 100644 --- a/ui/v2.5/src/components/Performers/PerformerDetails/Performer.tsx +++ b/ui/v2.5/src/components/Performers/PerformerDetails/Performer.tsx @@ -57,6 +57,7 @@ interface IPerformerParams { } const validTabs = [ + "default", "scenes", "galleries", "images", @@ -65,7 +66,7 @@ const validTabs = [ ] as const; type TabKey = (typeof validTabs)[number]; -const defaultTab: TabKey = "scenes"; +const defaultTab: TabKey = "default"; function isTabKey(tab: string): tab is TabKey { return validTabs.includes(tab as TabKey); @@ -82,7 +83,7 @@ const PerformerPage: React.FC = ({ performer, tabKey }) => { const abbreviateCounter = uiConfig?.abbreviateCounters ?? false; const enableBackgroundImage = uiConfig?.enablePerformerBackgroundImage ?? 
false; - const showAllDetails = uiConfig?.showAllDetails ?? false; + const showAllDetails = uiConfig?.showAllDetails ?? true; const compactExpandedDetails = uiConfig?.compactExpandedDetails ?? false; const [collapsed, setCollapsed] = useState(!showAllDetails); @@ -117,11 +118,30 @@ const PerformerPage: React.FC = ({ performer, tabKey }) => { const [updatePerformer] = usePerformerUpdate(); const [deletePerformer, { loading: isDestroying }] = usePerformerDestroy(); + const populatedDefaultTab = useMemo(() => { + let ret: TabKey = "scenes"; + if (performer.scene_count == 0) { + if (performer.gallery_count != 0) { + ret = "galleries"; + } else if (performer.image_count != 0) { + ret = "images"; + } else if (performer.movie_count != 0) { + ret = "movies"; + } + } + + return ret; + }, [performer]); + + if (tabKey === defaultTab) { + tabKey = populatedDefaultTab; + } + function setTabKey(newTabKey: string | null) { - if (!newTabKey) newTabKey = defaultTab; + if (!newTabKey || newTabKey === defaultTab) newTabKey = populatedDefaultTab; if (newTabKey === tabKey) return; - if (newTabKey === defaultTab) { + if (newTabKey === populatedDefaultTab) { history.replace(`/performers/${performer.id}`); } else if (isTabKey(newTabKey)) { history.replace(`/performers/${performer.id}/${newTabKey}`); diff --git a/ui/v2.5/src/components/Performers/PerformerDetails/PerformerScrapeDialog.tsx b/ui/v2.5/src/components/Performers/PerformerDetails/PerformerScrapeDialog.tsx index 897bd17dd1f..2baf1d8711f 100644 --- a/ui/v2.5/src/components/Performers/PerformerDetails/PerformerScrapeDialog.tsx +++ b/ui/v2.5/src/components/Performers/PerformerDetails/PerformerScrapeDialog.tsx @@ -3,13 +3,12 @@ import { useIntl } from "react-intl"; import * as GQL from "src/core/generated-graphql"; import { ScrapeDialog, - ScrapeResult, ScrapedInputGroupRow, ScrapedImagesRow, ScrapeDialogRow, ScrapedTextAreaRow, ScrapedCountryRow, -} from "src/components/Shared/ScrapeDialog"; +} from 
"src/components/Shared/ScrapeDialog/ScrapeDialog"; import { useTagCreate } from "src/core/StashService"; import { Form } from "react-bootstrap"; import { TagSelect } from "src/components/Shared/Select"; @@ -26,6 +25,7 @@ import { stringToCircumcised, } from "src/utils/circumcised"; import { IStashBox } from "./PerformerStashBoxModal"; +import { ScrapeResult } from "src/components/Shared/ScrapeDialog/scrapeResult"; function renderScrapedGender( result: ScrapeResult, diff --git a/ui/v2.5/src/components/Performers/PerformerSelect.tsx b/ui/v2.5/src/components/Performers/PerformerSelect.tsx new file mode 100644 index 00000000000..c721d652deb --- /dev/null +++ b/ui/v2.5/src/components/Performers/PerformerSelect.tsx @@ -0,0 +1,241 @@ +import React, { useEffect, useState } from "react"; +import { + OptionProps, + components as reactSelectComponents, + MultiValueGenericProps, + SingleValueProps, +} from "react-select"; + +import * as GQL from "src/core/generated-graphql"; +import { + usePerformerCreate, + queryFindPerformersByIDForSelect, + queryFindPerformersForSelect, +} from "src/core/StashService"; +import { ConfigurationContext } from "src/hooks/Config"; +import { useIntl } from "react-intl"; +import { defaultMaxOptionsShown, IUIConfig } from "src/core/config"; +import { ListFilterModel } from "src/models/list-filter/filter"; +import { + FilterSelectComponent, + IFilterIDProps, + IFilterProps, + IFilterValueProps, + Option as SelectOption, +} from "../Shared/FilterSelect"; +import { useCompare } from "src/hooks/state"; + +export type SelectObject = { + id: string; + name?: string | null; + title?: string | null; +}; + +export type Performer = Pick< + GQL.Performer, + "id" | "name" | "alias_list" | "disambiguation" +>; +type Option = SelectOption; + +export const PerformerSelect: React.FC< + IFilterProps & IFilterValueProps +> = (props) => { + const [createPerformer] = usePerformerCreate(); + + const { configuration } = React.useContext(ConfigurationContext); + const 
intl = useIntl(); + const maxOptionsShown = + (configuration?.ui as IUIConfig).maxOptionsShown ?? defaultMaxOptionsShown; + const defaultCreatable = + !configuration?.interface.disableDropdownCreate.performer ?? true; + + async function loadPerformers(input: string): Promise { + const filter = new ListFilterModel(GQL.FilterMode.Performers); + filter.searchTerm = input; + filter.currentPage = 1; + filter.itemsPerPage = maxOptionsShown; + filter.sortBy = "name"; + filter.sortDirection = GQL.SortDirectionEnum.Asc; + const query = await queryFindPerformersForSelect(filter); + return query.data.findPerformers.performers.map((performer) => ({ + value: performer.id, + object: performer, + })); + } + + const PerformerOption: React.FC> = ( + optionProps + ) => { + let thisOptionProps = optionProps; + + const { object } = optionProps.data; + + let { name } = object; + + // if name does not match the input value but an alias does, show the alias + const { inputValue } = optionProps.selectProps; + let alias: string | undefined = ""; + if (!name.toLowerCase().includes(inputValue.toLowerCase())) { + alias = object.alias_list?.find((a) => + a.toLowerCase().includes(inputValue.toLowerCase()) + ); + } + + thisOptionProps = { + ...optionProps, + children: ( + + {name} + {object.disambiguation && ( + {` (${object.disambiguation})`} + )} + {alias && {` (${alias})`}} + + ), + }; + + return ; + }; + + const PerformerMultiValueLabel: React.FC< + MultiValueGenericProps + > = (optionProps) => { + let thisOptionProps = optionProps; + + const { object } = optionProps.data; + + thisOptionProps = { + ...optionProps, + children: object.name, + }; + + return ; + }; + + const PerformerValueLabel: React.FC> = ( + optionProps + ) => { + let thisOptionProps = optionProps; + + const { object } = optionProps.data; + + thisOptionProps = { + ...optionProps, + children: object.name, + }; + + return ; + }; + + const onCreate = async (name: string) => { + const result = await createPerformer({ + variables: 
{ input: { name } }, + }); + return { + value: result.data!.performerCreate!.id, + item: result.data!.performerCreate!, + message: "Created performer", + }; + }; + + const getNamedObject = (id: string, name: string) => { + return { + id, + name, + alias_list: [], + }; + }; + + const isValidNewOption = (inputValue: string, options: Performer[]) => { + if (!inputValue) { + return false; + } + + if ( + options.some((o) => { + return ( + o.name.toLowerCase() === inputValue.toLowerCase() || + o.alias_list?.some( + (a) => a.toLowerCase() === inputValue.toLowerCase() + ) + ); + }) + ) { + return false; + } + + return true; + }; + + return ( + + {...props} + loadOptions={loadPerformers} + getNamedObject={getNamedObject} + isValidNewOption={isValidNewOption} + components={{ + Option: PerformerOption, + MultiValueLabel: PerformerMultiValueLabel, + SingleValue: PerformerValueLabel, + }} + isMulti={props.isMulti ?? false} + creatable={props.creatable ?? defaultCreatable} + onCreate={onCreate} + placeholder={ + props.noSelectionString ?? 
+ intl.formatMessage( + { id: "actions.select_entity" }, + { entityType: intl.formatMessage({ id: "performer" }) } + ) + } + /> + ); +}; + +export const PerformerIDSelect: React.FC< + IFilterProps & IFilterIDProps +> = (props) => { + const { ids, onSelect: onSelectValues } = props; + + const [values, setValues] = useState([]); + const idsChanged = useCompare(ids); + + function onSelect(items: Performer[]) { + setValues(items); + onSelectValues?.(items); + } + + async function loadObjectsByID(idsToLoad: string[]): Promise { + const performerIDs = idsToLoad.map((id) => parseInt(id)); + const query = await queryFindPerformersByIDForSelect(performerIDs); + const { performers: loadedPerformers } = query.data.findPerformers; + + return loadedPerformers; + } + + useEffect(() => { + if (!idsChanged) { + return; + } + + if (!ids || ids?.length === 0) { + setValues([]); + return; + } + + // load the values if we have ids and they haven't been loaded yet + const filteredValues = values.filter((v) => ids.includes(v.id.toString())); + if (filteredValues.length === ids.length) { + return; + } + + const load = async () => { + const items = await loadObjectsByID(ids); + setValues(items); + }; + + load(); + }, [ids, idsChanged, values]); + + return ; +}; diff --git a/ui/v2.5/src/components/Performers/styles.scss b/ui/v2.5/src/components/Performers/styles.scss index 4a0ec524ac2..4451728043b 100644 --- a/ui/v2.5/src/components/Performers/styles.scss +++ b/ui/v2.5/src/components/Performers/styles.scss @@ -223,3 +223,7 @@ content: ""; } } + +.react-select .alias { + font-weight: bold; +} diff --git a/ui/v2.5/src/components/ScenePlayer/ScenePlayer.tsx b/ui/v2.5/src/components/ScenePlayer/ScenePlayer.tsx index bb28f61f3dc..a1b9a7ae9bf 100644 --- a/ui/v2.5/src/components/ScenePlayer/ScenePlayer.tsx +++ b/ui/v2.5/src/components/ScenePlayer/ScenePlayer.tsx @@ -8,6 +8,7 @@ import React, { useState, } from "react"; import videojs, { VideoJsPlayer, VideoJsPlayerOptions } from "video.js"; 
+import abLoopPlugin from "videojs-abloop"; import useScript from "src/hooks/useScript"; import "videojs-contrib-dash"; import "videojs-mobile-ui"; @@ -73,6 +74,21 @@ function handleHotkeys(player: VideoJsPlayer, event: videojs.KeyboardEvent) { player.currentTime(time); } + function toggleABLooping() { + const opts = player.abLoopPlugin.getOptions(); + if (!opts.start) { + opts.start = player.currentTime(); + } else if (!opts.end) { + opts.end = player.currentTime(); + opts.enabled = true; + } else { + opts.start = 0; + opts.end = 0; + opts.enabled = false; + } + player.abLoopPlugin.setOptions(opts); + } + let seekFactor = 10; if (event.shiftKey) { seekFactor = 5; @@ -111,6 +127,9 @@ function handleHotkeys(player: VideoJsPlayer, event: videojs.KeyboardEvent) { if (player.isFullscreen()) player.exitFullscreen(); else player.requestFullscreen(); break; + case 76: // l + toggleABLooping(); + break; case 38: // up arrow player.volume(player.volume() + 0.1); break; @@ -340,6 +359,16 @@ export const ScenePlayer: React.FC = ({ skipButtons: {}, trackActivity: {}, vrMenu: {}, + abLoopPlugin: { + start: 0, + end: false, + enabled: false, + loopIfBeforeStart: true, + loopIfAfterEnd: true, + pauseAfterLooping: false, + pauseBeforeLooping: false, + createButtons: uiConfig?.showAbLoopControls ?? 
false, + }, }, }; @@ -349,6 +378,8 @@ export const ScenePlayer: React.FC = ({ videoEl.classList.add("vjs-big-play-centered"); videoRef.current!.appendChild(videoEl); + abLoopPlugin(window, videojs); + const vjs = videojs(videoEl, options); /* eslint-disable-next-line @typescript-eslint/no-explicit-any */ @@ -372,7 +403,8 @@ export const ScenePlayer: React.FC = ({ sceneId.current = undefined; }; // empty deps - only init once - }, []); + // showAbLoopControls is necessary to re-init the player when the config changes + }, [uiConfig?.showAbLoopControls]); useEffect(() => { const player = getPlayer(); diff --git a/ui/v2.5/src/components/ScenePlayer/ScenePlayerScrubber.tsx b/ui/v2.5/src/components/ScenePlayer/ScenePlayerScrubber.tsx index 68dfbb406da..c6cf120fba1 100644 --- a/ui/v2.5/src/components/ScenePlayer/ScenePlayerScrubber.tsx +++ b/ui/v2.5/src/components/ScenePlayer/ScenePlayerScrubber.tsx @@ -6,15 +6,14 @@ import React, { useCallback, } from "react"; import { Button } from "react-bootstrap"; -import axios from "axios"; import * as GQL from "src/core/generated-graphql"; import TextUtils from "src/utils/text"; -import { WebVTT } from "videojs-vtt.js"; import { Icon } from "src/components/Shared/Icon"; import { faChevronRight, faChevronLeft, } from "@fortawesome/free-solid-svg-icons"; +import { useSpriteInfo } from "src/hooks/sprite"; interface IScenePlayerScrubberProps { file: GQL.VideoFileDataFragment; @@ -29,42 +28,6 @@ interface ISceneSpriteItem { time: string; } -interface ISceneSpriteInfo { - url: string; - start: number; - end: number; - x: number; - y: number; - w: number; - h: number; -} - -async function fetchSpriteInfo(vttPath: string) { - const response = await axios.get(vttPath, { responseType: "text" }); - - const sprites: ISceneSpriteInfo[] = []; - - const parser = new WebVTT.Parser(window, WebVTT.StringDecoder()); - parser.oncue = (cue: VTTCue) => { - const match = cue.text.match(/^([^#]*)#xywh=(\d+),(\d+),(\d+),(\d+)$/i); - if (!match) return; - 
- sprites.push({ - url: new URL(match[1], vttPath).href, - start: cue.startTime, - end: cue.endTime, - x: Number(match[2]), - y: Number(match[3]), - w: Number(match[4]), - h: Number(match[5]), - }); - }; - parser.parse(response.data); - parser.flush(); - - return sprites; -} - export const ScenePlayerScrubber: React.FC = ({ file, scene, @@ -119,34 +82,32 @@ export const ScenePlayerScrubber: React.FC = ({ [onSeek, file.duration, scrubWidth] ); + const spriteInfo = useSpriteInfo(scene.paths.vtt ?? undefined); const [spriteItems, setSpriteItems] = useState(); useEffect(() => { - if (!scene.paths.vtt) return; - fetchSpriteInfo(scene.paths.vtt).then((sprites) => { - if (!sprites) return; - let totalWidth = 0; - const newSprites = sprites?.map((sprite, index) => { - totalWidth += sprite.w; - const left = sprite.w * index; - const style = { - width: `${sprite.w}px`, - height: `${sprite.h}px`, - backgroundPosition: `${-sprite.x}px ${-sprite.y}px`, - backgroundImage: `url(${sprite.url})`, - left: `${left}px`, - }; - const start = TextUtils.secondsToTimestamp(sprite.start); - const end = TextUtils.secondsToTimestamp(sprite.end); - return { - style, - time: `${start} - ${end}`, - }; - }); - setScrubWidth(totalWidth); - setSpriteItems(newSprites); + if (!spriteInfo) return; + let totalWidth = 0; + const newSprites = spriteInfo?.map((sprite, index) => { + totalWidth += sprite.w; + const left = sprite.w * index; + const style = { + width: `${sprite.w}px`, + height: `${sprite.h}px`, + backgroundPosition: `${-sprite.x}px ${-sprite.y}px`, + backgroundImage: `url(${sprite.url})`, + left: `${left}px`, + }; + const start = TextUtils.secondsToTimestamp(sprite.start); + const end = TextUtils.secondsToTimestamp(sprite.end); + return { + style, + time: `${start} - ${end}`, + }; }); - }, [scene]); + setScrubWidth(totalWidth); + setSpriteItems(newSprites); + }, [spriteInfo]); useEffect(() => { const onResize = (entries: ResizeObserverEntry[]) => { diff --git 
a/ui/v2.5/src/components/Scenes/PreviewScrubber.tsx b/ui/v2.5/src/components/Scenes/PreviewScrubber.tsx new file mode 100644 index 00000000000..201d0af210e --- /dev/null +++ b/ui/v2.5/src/components/Scenes/PreviewScrubber.tsx @@ -0,0 +1,173 @@ +import React, { useMemo } from "react"; +import { useDebounce } from "src/hooks/debounce"; +import { useSpriteInfo } from "src/hooks/sprite"; +import TextUtils from "src/utils/text"; + +interface IHoverScrubber { + totalSprites: number; + activeIndex: number | undefined; + setActiveIndex: (index: number | undefined) => void; + onClick?: (index: number) => void; +} + +const HoverScrubber: React.FC = ({ + totalSprites, + activeIndex, + setActiveIndex, + onClick, +}) => { + function getActiveIndex(e: React.MouseEvent) { + const { width } = e.currentTarget.getBoundingClientRect(); + const x = e.nativeEvent.offsetX; + + return Math.floor((x / width) * (totalSprites - 1)); + } + + function onMouseMove(e: React.MouseEvent) { + const relatedTarget = e.currentTarget; + + if (relatedTarget !== e.target) return; + + setActiveIndex(getActiveIndex(e)); + } + + function onMouseLeave() { + setActiveIndex(undefined); + } + + function onScrubberClick(e: React.MouseEvent) { + if (!onClick) return; + + const relatedTarget = e.currentTarget; + + if (relatedTarget !== e.target) return; + + e.preventDefault(); + onClick(getActiveIndex(e)); + } + + const indicatorStyle = useMemo(() => { + if (activeIndex === undefined) return {}; + + const width = (activeIndex / totalSprites) * 100; + + return { + width: `${width}%`, + }; + }, [activeIndex, totalSprites]); + + return ( +
+
+
+ {activeIndex !== undefined && ( +
+ )} +
+
+ ); +}; + +interface IScenePreviewProps { + vttPath: string | undefined; + onClick?: (timestamp: number) => void; +} + +function scaleToFit(dimensions: { w: number; h: number }, bounds: DOMRect) { + const rw = bounds.width / dimensions.w; + const rh = bounds.height / dimensions.h; + + // for consistency, use max by default and min for portrait + if (dimensions.w > dimensions.h) { + return Math.max(rw, rh); + } + + return Math.min(rw, rh); +} + +export const PreviewScrubber: React.FC = ({ + vttPath, + onClick, +}) => { + const imageParentRef = React.useRef(null); + + const [activeIndex, setActiveIndex] = React.useState(); + + const debounceSetActiveIndex = useDebounce( + setActiveIndex, + [setActiveIndex], + 1 + ); + + const spriteInfo = useSpriteInfo(vttPath); + + const style = useMemo(() => { + if (!spriteInfo || activeIndex === undefined || !imageParentRef.current) { + return {}; + } + + const sprite = spriteInfo[activeIndex]; + + const clientRect = imageParentRef.current?.getBoundingClientRect(); + const scale = clientRect ? scaleToFit(sprite, clientRect) : 1; + + return { + backgroundPosition: `${-sprite.x}px ${-sprite.y}px`, + backgroundImage: `url(${sprite.url})`, + width: `${sprite.w}px`, + height: `${sprite.h}px`, + transform: `scale(${scale})`, + }; + }, [spriteInfo, activeIndex, imageParentRef]); + + const currentTime = useMemo(() => { + if (!spriteInfo || activeIndex === undefined) { + return undefined; + } + + const sprite = spriteInfo[activeIndex]; + + const start = TextUtils.secondsToTimestamp(sprite.start); + + return start; + }, [activeIndex, spriteInfo]); + + function onScrubberClick(index: number) { + if (!spriteInfo || !onClick) { + return; + } + + const sprite = spriteInfo[index]; + + onClick(sprite.start); + } + + if (!spriteInfo) return null; + + return ( +
+ {activeIndex !== undefined && spriteInfo && ( +
+
+ {currentTime !== undefined && ( +
{currentTime}
+ )} +
+ )} + debounceSetActiveIndex(i)} + onClick={onScrubberClick} + /> +
+ ); +}; diff --git a/ui/v2.5/src/components/Scenes/SceneCard.tsx b/ui/v2.5/src/components/Scenes/SceneCard.tsx index bceb91eac62..9d31aceb250 100644 --- a/ui/v2.5/src/components/Scenes/SceneCard.tsx +++ b/ui/v2.5/src/components/Scenes/SceneCard.tsx @@ -1,6 +1,6 @@ import React, { useEffect, useMemo, useRef } from "react"; import { Button, ButtonGroup } from "react-bootstrap"; -import { Link } from "react-router-dom"; +import { Link, useHistory } from "react-router-dom"; import cx from "classnames"; import * as GQL from "src/core/generated-graphql"; import { Icon } from "../Shared/Icon"; @@ -25,12 +25,15 @@ import { faTag, } from "@fortawesome/free-solid-svg-icons"; import { objectPath, objectTitle } from "src/core/files"; +import { PreviewScrubber } from "./PreviewScrubber"; interface IScenePreviewProps { isPortrait: boolean; image?: string; video?: string; soundActive: boolean; + vttPath?: string; + onScrubberClick?: (timestamp: number) => void; } export const ScenePreview: React.FC = ({ @@ -38,6 +41,8 @@ export const ScenePreview: React.FC = ({ video, isPortrait, soundActive, + vttPath, + onScrubberClick, }) => { const videoEl = useRef(null); @@ -72,6 +77,7 @@ export const ScenePreview: React.FC = ({ ref={videoEl} src={video} /> +
); }; @@ -90,6 +96,7 @@ interface ISceneCardProps { export const SceneCard: React.FC = ( props: ISceneCardProps ) => { + const history = useHistory(); const { configuration } = React.useContext(ConfigurationContext); const file = useMemo( @@ -383,6 +390,18 @@ export const SceneCard: React.FC = ( }) : `/scenes/${props.scene.id}`; + function onScrubberClick(timestamp: number) { + const link = props.queue + ? props.queue.makeLink(props.scene.id, { + sceneIndex: props.index, + continue: cont, + start: timestamp, + }) + : `/scenes/${props.scene.id}?t=${timestamp}`; + + history.push(link); + } + return ( = ( video={props.scene.paths.preview ?? undefined} isPortrait={isPortrait()} soundActive={configuration?.interface?.soundOnPreview ?? false} + vttPath={props.scene.paths.vtt ?? undefined} + onScrubberClick={onScrubberClick} /> {maybeRenderSceneSpecsOverlay()} diff --git a/ui/v2.5/src/components/Scenes/SceneDetails/SceneEditPanel.tsx b/ui/v2.5/src/components/Scenes/SceneDetails/SceneEditPanel.tsx index 15989fa3cb6..7842f104dbd 100644 --- a/ui/v2.5/src/components/Scenes/SceneDetails/SceneEditPanel.tsx +++ b/ui/v2.5/src/components/Scenes/SceneDetails/SceneEditPanel.tsx @@ -20,7 +20,6 @@ import { queryScrapeSceneQueryFragment, } from "src/core/StashService"; import { - PerformerSelect, TagSelect, StudioSelect, GallerySelect, @@ -51,6 +50,10 @@ import { useRatingKeybinds } from "src/hooks/keybinds"; import { lazyComponent } from "src/utils/lazyComponent"; import isEqual from "lodash-es/isEqual"; import { DateInput } from "src/components/Shared/DateInput"; +import { + Performer, + PerformerSelect, +} from "src/components/Performers/PerformerSelect"; const SceneScrapeDialog = lazyComponent(() => import("./SceneScrapeDialog")); const SceneQueryModal = lazyComponent(() => import("./SceneQueryModal")); @@ -78,6 +81,7 @@ export const SceneEditPanel: React.FC = ({ const [galleries, setGalleries] = useState<{ id: string; title: string }[]>( [] ); + const [performers, setPerformers] = 
useState([]); const Scrapers = useListSceneScrapers(); const [fragmentScrapers, setFragmentScrapers] = useState([]); @@ -98,6 +102,10 @@ export const SceneEditPanel: React.FC = ({ ); }, [scene.galleries]); + useEffect(() => { + setPerformers(scene.performers ?? []); + }, [scene.performers]); + const { configuration: stashConfig } = React.useContext(ConfigurationContext); // Network state @@ -218,6 +226,14 @@ export const SceneEditPanel: React.FC = ({ ); } + function onSetPerformers(items: Performer[]) { + setPerformers(items); + formik.setFieldValue( + "performer_ids", + items.map((item) => item.id) + ); + } + useRatingKeybinds( isVisible, stashConfig?.ui?.ratingSystemOptions?.type, @@ -414,6 +430,7 @@ export const SceneEditPanel: React.FC = ({ return ( onScrapeDialogClosed(s)} @@ -581,8 +598,15 @@ export const SceneEditPanel: React.FC = ({ }); if (idPerfs.length > 0) { - const newIds = idPerfs.map((p) => p.stored_id); - formik.setFieldValue("performer_ids", newIds as string[]); + onSetPerformers( + idPerfs.map((p) => { + return { + id: p.stored_id!, + name: p.name ?? "", + alias_list: [], + }; + }) + ); } } @@ -852,13 +876,8 @@ export const SceneEditPanel: React.FC = ({ - formik.setFieldValue( - "performer_ids", - items.map((item) => item.id) - ) - } - ids={formik.values.performer_ids} + onSelect={onSetPerformers} + values={performers} />
diff --git a/ui/v2.5/src/components/Scenes/SceneDetails/SceneScrapeDialog.tsx b/ui/v2.5/src/components/Scenes/SceneDetails/SceneScrapeDialog.tsx index a75d7eac3e0..b08dab36b49 100644 --- a/ui/v2.5/src/components/Scenes/SceneDetails/SceneScrapeDialog.tsx +++ b/ui/v2.5/src/components/Scenes/SceneDetails/SceneScrapeDialog.tsx @@ -1,292 +1,43 @@ -import React, { useMemo, useState } from "react"; +import React, { useState } from "react"; import * as GQL from "src/core/generated-graphql"; -import { - MovieSelect, - TagSelect, - StudioSelect, - PerformerSelect, -} from "src/components/Shared/Select"; import { ScrapeDialog, - ScrapeDialogRow, - ScrapeResult, ScrapedInputGroupRow, ScrapedTextAreaRow, ScrapedImageRow, - IHasName, ScrapedStringListRow, -} from "src/components/Shared/ScrapeDialog"; +} from "src/components/Shared/ScrapeDialog/ScrapeDialog"; import clone from "lodash-es/clone"; -import { - useStudioCreate, - usePerformerCreate, - useMovieCreate, - useTagCreate, -} from "src/core/StashService"; -import { useToast } from "src/hooks/Toast"; import { useIntl } from "react-intl"; import { uniq } from "lodash-es"; -import { scrapedPerformerToCreateInput } from "src/core/performers"; -import { scrapedMovieToCreateInput } from "src/core/movies"; - -interface IScrapedStudioRow { - title: string; - result: ScrapeResult; - onChange: (value: ScrapeResult) => void; - newStudio?: GQL.ScrapedStudio; - onCreateNew?: (value: GQL.ScrapedStudio) => void; -} - -export const ScrapedStudioRow: React.FC = ({ - title, - result, - onChange, - newStudio, - onCreateNew, -}) => { - function renderScrapedStudio( - scrapeResult: ScrapeResult, - isNew?: boolean, - onChangeFn?: (value: string) => void - ) { - const resultValue = isNew - ? scrapeResult.newValue - : scrapeResult.originalValue; - const value = resultValue ? 
[resultValue] : []; - - return ( - { - if (onChangeFn) { - onChangeFn(items[0]?.id); - } - }} - ids={value} - /> - ); - } - - return ( - renderScrapedStudio(result)} - renderNewField={() => - renderScrapedStudio(result, true, (value) => - onChange(result.cloneWithValue(value)) - ) - } - onChange={onChange} - newValues={newStudio ? [newStudio] : undefined} - onCreateNew={() => { - if (onCreateNew && newStudio) onCreateNew(newStudio); - }} - /> - ); -}; - -interface IScrapedObjectsRow { - title: string; - result: ScrapeResult; - onChange: (value: ScrapeResult) => void; - newObjects?: T[]; - onCreateNew?: (value: T) => void; - renderObjects: ( - result: ScrapeResult, - isNew?: boolean, - onChange?: (value: string[]) => void - ) => JSX.Element; -} - -export const ScrapedObjectsRow = ( - props: IScrapedObjectsRow -) => { - const { title, result, onChange, newObjects, onCreateNew, renderObjects } = - props; - - return ( - renderObjects(result)} - renderNewField={() => - renderObjects(result, true, (value) => - onChange(result.cloneWithValue(value)) - ) - } - onChange={onChange} - newValues={newObjects} - onCreateNew={(i) => { - if (onCreateNew) onCreateNew(newObjects![i]); - }} - /> - ); -}; - -type IScrapedObjectRowImpl = Omit, "renderObjects">; - -export const ScrapedPerformersRow: React.FC< - IScrapedObjectRowImpl -> = ({ title, result, onChange, newObjects, onCreateNew }) => { - const performersCopy = useMemo(() => { - return ( - newObjects?.map((p) => { - const name: string = p.name ?? ""; - return { ...p, name }; - }) ?? [] - ); - }, [newObjects]); - - type PerformerType = GQL.ScrapedPerformer & { - name: string; - }; - - function renderScrapedPerformers( - scrapeResult: ScrapeResult, - isNew?: boolean, - onChangeFn?: (value: string[]) => void - ) { - const resultValue = isNew - ? scrapeResult.newValue - : scrapeResult.originalValue; - const value = resultValue ?? 
[]; - - return ( - { - if (onChangeFn) { - onChangeFn(items.map((i) => i.id)); - } - }} - ids={value} - /> - ); - } - - return ( - - title={title} - result={result} - renderObjects={renderScrapedPerformers} - onChange={onChange} - newObjects={performersCopy} - onCreateNew={onCreateNew} - /> - ); -}; - -export const ScrapedMoviesRow: React.FC< - IScrapedObjectRowImpl -> = ({ title, result, onChange, newObjects, onCreateNew }) => { - const moviesCopy = useMemo(() => { - return ( - newObjects?.map((p) => { - const name: string = p.name ?? ""; - return { ...p, name }; - }) ?? [] - ); - }, [newObjects]); - - type MovieType = GQL.ScrapedMovie & { - name: string; - }; - - function renderScrapedMovies( - scrapeResult: ScrapeResult, - isNew?: boolean, - onChangeFn?: (value: string[]) => void - ) { - const resultValue = isNew - ? scrapeResult.newValue - : scrapeResult.originalValue; - const value = resultValue ?? []; - - return ( - { - if (onChangeFn) { - onChangeFn(items.map((i) => i.id)); - } - }} - ids={value} - /> - ); - } - - return ( - - title={title} - result={result} - renderObjects={renderScrapedMovies} - onChange={onChange} - newObjects={moviesCopy} - onCreateNew={onCreateNew} - /> - ); -}; - -export const ScrapedTagsRow: React.FC< - IScrapedObjectRowImpl -> = ({ title, result, onChange, newObjects, onCreateNew }) => { - function renderScrapedTags( - scrapeResult: ScrapeResult, - isNew?: boolean, - onChangeFn?: (value: string[]) => void - ) { - const resultValue = isNew - ? scrapeResult.newValue - : scrapeResult.originalValue; - const value = resultValue ?? 
[]; - - return ( - { - if (onChangeFn) { - onChangeFn(items.map((i) => i.id)); - } - }} - ids={value} - /> - ); - } - - return ( - - title={title} - result={result} - renderObjects={renderScrapedTags} - onChange={onChange} - newObjects={newObjects} - onCreateNew={onCreateNew} - /> - ); -}; +import { Performer } from "src/components/Performers/PerformerSelect"; +import { IHasStoredID, sortStoredIdObjects } from "src/utils/data"; +import { ScrapeResult } from "src/components/Shared/ScrapeDialog/scrapeResult"; +import { + ScrapedMoviesRow, + ScrapedPerformersRow, + ScrapedStudioRow, + ScrapedTagsRow, +} from "src/components/Shared/ScrapeDialog/ScrapedObjectsRow"; +import { + useCreateScrapedMovie, + useCreateScrapedPerformer, + useCreateScrapedStudio, + useCreateScrapedTag, +} from "src/components/Shared/ScrapeDialog/createObjects"; interface ISceneScrapeDialogProps { scene: Partial; + scenePerformers: Performer[]; scraped: GQL.ScrapedScene; endpoint?: string; onClose: (scrapedScene?: GQL.ScrapedScene) => void; } -interface IHasStoredID { - stored_id?: string | null; -} - export const SceneScrapeDialog: React.FC = ({ scene, + scenePerformers, scraped, onClose, endpoint, @@ -365,10 +116,17 @@ export const SceneScrapeDialog: React.FC = ({ return ret; } - const [performers, setPerformers] = useState>( - new ScrapeResult( - sortIdList(scene.performer_ids), - mapStoredIdObjects(scraped.performers ?? undefined) + const [performers, setPerformers] = useState< + ScrapeResult + >( + new ScrapeResult( + sortStoredIdObjects( + scenePerformers.map((p) => ({ + stored_id: p.id, + name: p.name, + })) + ), + sortStoredIdObjects(scraped.performers ?? 
undefined) ) ); const [newPerformers, setNewPerformers] = useState( @@ -403,13 +161,34 @@ export const SceneScrapeDialog: React.FC = ({ new ScrapeResult(scene.cover_image, scraped.image) ); - const [createStudio] = useStudioCreate(); - const [createPerformer] = usePerformerCreate(); - const [createMovie] = useMovieCreate(); - const [createTag] = useTagCreate(); + const createNewStudio = useCreateScrapedStudio({ + scrapeResult: studio, + setScrapeResult: setStudio, + setNewObject: setNewStudio, + }); + + const createNewPerformer = useCreateScrapedPerformer({ + scrapeResult: performers, + setScrapeResult: setPerformers, + newObjects: newPerformers, + setNewObjects: setNewPerformers, + }); + + const createNewMovie = useCreateScrapedMovie({ + scrapeResult: movies, + setScrapeResult: setMovies, + newObjects: newMovies, + setNewObjects: setNewMovies, + }); + + const createNewTag = useCreateScrapedTag({ + scrapeResult: tags, + setScrapeResult: setTags, + newObjects: newTags, + setNewObjects: setNewTags, + }); const intl = useIntl(); - const Toast = useToast(); // don't show the dialog if nothing was scraped if ( @@ -436,143 +215,6 @@ export const SceneScrapeDialog: React.FC = ({ return <>; } - async function createNewStudio(toCreate: GQL.ScrapedStudio) { - try { - const result = await createStudio({ - variables: { - input: { - name: toCreate.name, - url: toCreate.url, - }, - }, - }); - - // set the new studio as the value - setStudio(studio.cloneWithValue(result.data!.studioCreate!.id)); - setNewStudio(undefined); - - Toast.success({ - content: ( - - Created studio: {toCreate.name} - - ), - }); - } catch (e) { - Toast.error(e); - } - } - - async function createNewPerformer(toCreate: GQL.ScrapedPerformer) { - const input = scrapedPerformerToCreateInput(toCreate); - - try { - const result = await createPerformer({ - variables: { input }, - }); - - const newValue = [...(performers.newValue ?? 
[])]; - if (result.data?.performerCreate) - newValue.push(result.data.performerCreate.id); - - // add the new performer to the new performers value - const performerClone = performers.cloneWithValue(newValue); - setPerformers(performerClone); - - // remove the performer from the list - const newPerformersClone = newPerformers.concat(); - const pIndex = newPerformersClone.findIndex( - (p) => p.name === toCreate.name - ); - if (pIndex === -1) throw new Error("Could not find performer to remove"); - - newPerformersClone.splice(pIndex, 1); - - setNewPerformers(newPerformersClone); - - Toast.success({ - content: ( - - Created performer: {toCreate.name} - - ), - }); - } catch (e) { - Toast.error(e); - } - } - - async function createNewMovie(toCreate: GQL.ScrapedMovie) { - const movieInput = scrapedMovieToCreateInput(toCreate); - try { - const result = await createMovie({ - variables: { input: movieInput }, - }); - - // add the new movie to the new movies value - const movieClone = movies.cloneWithValue(movies.newValue); - if (!movieClone.newValue) { - movieClone.newValue = []; - } - movieClone.newValue.push(result.data!.movieCreate!.id); - setMovies(movieClone); - - // remove the movie from the list - const newMoviesClone = newMovies.concat(); - const pIndex = newMoviesClone.findIndex((p) => p.name === toCreate.name); - if (pIndex === -1) throw new Error("Could not find movie to remove"); - newMoviesClone.splice(pIndex, 1); - - setNewMovies(newMoviesClone); - - Toast.success({ - content: ( - - Created movie: {toCreate.name} - - ), - }); - } catch (e) { - Toast.error(e); - } - } - - async function createNewTag(toCreate: GQL.ScrapedTag) { - const tagInput: GQL.TagCreateInput = { name: toCreate.name ?? "" }; - try { - const result = await createTag({ - variables: { - input: tagInput, - }, - }); - - const newValue = [...(tags.newValue ?? 
[])]; - if (result.data?.tagCreate) newValue.push(result.data.tagCreate.id); - - // add the new tag to the new tags value - const tagClone = tags.cloneWithValue(newValue); - setTags(tagClone); - - // remove the tag from the list - const newTagsClone = newTags.concat(); - const pIndex = newTagsClone.indexOf(toCreate); - if (pIndex === -1) throw new Error("Could not find tag to remove"); - newTagsClone.splice(pIndex, 1); - - setNewTags(newTagsClone); - - Toast.success({ - content: ( - - Created tag: {toCreate.name} - - ), - }); - } catch (e) { - Toast.error(e); - } - } - function makeNewScrapedItem(): GQL.ScrapedSceneDataFragment { const newStudioValue = studio.getNewValue(); @@ -588,12 +230,7 @@ export const SceneScrapeDialog: React.FC = ({ name: "", } : undefined, - performers: performers.getNewValue()?.map((p) => { - return { - stored_id: p, - name: "", - }; - }), + performers: performers.getNewValue(), movies: movies.getNewValue()?.map((m) => { return { stored_id: m, diff --git a/ui/v2.5/src/components/Scenes/SceneMergeDialog.tsx b/ui/v2.5/src/components/Scenes/SceneMergeDialog.tsx index ca296a4b7dd..668d8b7ff78 100644 --- a/ui/v2.5/src/components/Scenes/SceneMergeDialog.tsx +++ b/ui/v2.5/src/components/Scenes/SceneMergeDialog.tsx @@ -12,26 +12,29 @@ import { FormattedMessage, useIntl } from "react-intl"; import { useToast } from "src/hooks/Toast"; import { faExchangeAlt, faSignInAlt } from "@fortawesome/free-solid-svg-icons"; import { - hasScrapedValues, ScrapeDialog, ScrapeDialogRow, ScrapedImageRow, ScrapedInputGroupRow, ScrapedStringListRow, ScrapedTextAreaRow, +} from "../Shared/ScrapeDialog/ScrapeDialog"; +import { clone, uniq } from "lodash-es"; +import { galleryTitle } from "src/core/galleries"; +import { RatingSystem } from "src/components/Shared/Rating/RatingSystem"; +import { ModalComponent } from "../Shared/Modal"; +import { IHasStoredID, sortStoredIdObjects } from "src/utils/data"; +import { ScrapeResult, ZeroableScrapeResult, -} from 
"../Shared/ScrapeDialog"; -import { clone, uniq } from "lodash-es"; + hasScrapedValues, +} from "../Shared/ScrapeDialog/scrapeResult"; import { ScrapedMoviesRow, ScrapedPerformersRow, ScrapedStudioRow, ScrapedTagsRow, -} from "./SceneDetails/SceneScrapeDialog"; -import { galleryTitle } from "src/core/galleries"; -import { RatingSystem } from "src/components/Shared/Rating/RatingSystem"; -import { ModalComponent } from "../Shared/Modal"; +} from "../Shared/ScrapeDialog/ScrapedObjectsRow"; interface IStashIDsField { values: GQL.StashId[]; @@ -101,8 +104,25 @@ const SceneMergeDetails: React.FC = ({ return ret; } - const [performers, setPerformers] = useState>( - new ScrapeResult(sortIdList(dest.performers.map((p) => p.id))) + function idToStoredID(o: { id: string; name: string }) { + return { + stored_id: o.id, + name: o.name, + }; + } + + function uniqIDStoredIDs(objs: IHasStoredID[]) { + return objs.filter((o, i) => { + return objs.findIndex((oo) => oo.stored_id === o.stored_id) === i; + }); + } + + const [performers, setPerformers] = useState< + ScrapeResult + >( + new ScrapeResult( + sortStoredIdObjects(dest.performers.map(idToStoredID)) + ) ); const [movies, setMovies] = useState>( @@ -184,8 +204,8 @@ const SceneMergeDetails: React.FC = ({ setPerformers( new ScrapeResult( - dest.performers.map((p) => p.id), - uniq(all.map((s) => s.performers.map((p) => p.id)).flat()) + dest.performers.map(idToStoredID), + uniqIDStoredIDs(all.map((s) => s.performers.map(idToStoredID)).flat()) ) ); setTags( @@ -559,7 +579,7 @@ const SceneMergeDetails: React.FC = ({ play_duration: playDuration.getNewValue(), gallery_ids: galleries.getNewValue(), studio_id: studio.getNewValue(), - performer_ids: performers.getNewValue(), + performer_ids: performers.getNewValue()?.map((p) => p.stored_id!), movies: movies.getNewValue()?.map((m) => { // find the equivalent movie in the original scenes const found = all diff --git a/ui/v2.5/src/components/Scenes/styles.scss 
b/ui/v2.5/src/components/Scenes/styles.scss index 31e5de8d1fc..a2c74cb8895 100644 --- a/ui/v2.5/src/components/Scenes/styles.scss +++ b/ui/v2.5/src/components/Scenes/styles.scss @@ -643,3 +643,66 @@ input[type="range"].blue-slider { .scrape-dialog .rating-number.disabled { padding-left: 0.5em; } + +.preview-scrubber { + height: 100%; + position: absolute; + width: 100%; + + .scene-card-preview-image { + align-items: center; + display: flex; + justify-content: center; + overflow: hidden; + } + + .scrubber-image { + height: 100%; + width: 100%; + } + + .scrubber-timestamp { + bottom: calc(20px + 0.25rem); + font-weight: 400; + opacity: 0.75; + position: absolute; + right: 0.7rem; + text-shadow: 0 0 3px #000; + } +} + +.hover-scrubber { + bottom: 0; + height: 20px; + overflow: hidden; + position: absolute; + width: 100%; + + .hover-scrubber-area { + cursor: col-resize; + height: 100%; + position: absolute; + width: 100%; + z-index: 1; + } + + .hover-scrubber-indicator { + background-color: rgba(255, 255, 255, 0.1); + bottom: -100%; + height: 100%; + position: absolute; + transition: bottom 0.2s ease-in-out; + width: 100%; + + .hover-scrubber-indicator-marker { + background-color: rgba(255, 0, 0, 0.5); + bottom: 0; + height: 5px; + position: absolute; + } + } + + &:hover .hover-scrubber-indicator { + bottom: 0; + } +} diff --git a/ui/v2.5/src/components/Settings/SettingsInterfacePanel/SettingsInterfacePanel.tsx b/ui/v2.5/src/components/Settings/SettingsInterfacePanel/SettingsInterfacePanel.tsx index 6b6bf69bc0d..4d8c5544c41 100644 --- a/ui/v2.5/src/components/Settings/SettingsInterfacePanel/SettingsInterfacePanel.tsx +++ b/ui/v2.5/src/components/Settings/SettingsInterfacePanel/SettingsInterfacePanel.tsx @@ -364,6 +364,13 @@ export const SettingsInterfacePanel: React.FC = () => { return {DurationUtils.secondsToString(v ?? 
0)}; }} /> + + saveUI({ showAbLoopControls: v })} + /> = { value: string; object: T }; + +interface ISelectProps + extends AsyncCreatableProps, IsMulti, GroupBase>> { + selectedOptions?: OnChangeValue, IsMulti>; + creatable?: boolean; + isLoading?: boolean; + isDisabled?: boolean; + placeholder?: string; + showDropdown?: boolean; + groupHeader?: string; + noOptionsMessageText?: string | null; +} + +interface IFilterSelectProps + extends Pick< + ISelectProps, + | "selectedOptions" + | "isLoading" + | "isMulti" + | "components" + | "placeholder" + | "closeMenuOnSelect" + > {} + +const getSelectedItems = ( + selectedItems: OnChangeValue, boolean> +) => { + if (Array.isArray(selectedItems)) { + return selectedItems; + } else if (selectedItems) { + return [selectedItems]; + } else { + return []; + } +}; + +const SelectComponent = ( + props: ISelectProps +) => { + const { + selectedOptions, + isLoading, + isDisabled = false, + creatable = false, + components, + placeholder, + showDropdown = true, + noOptionsMessageText: noOptionsMessage = "None", + } = props; + + const styles: StylesConfig, IsMulti> = { + option: (base) => ({ + ...base, + color: "#000", + }), + container: (base, state) => ({ + ...base, + zIndex: state.isFocused ? 10 : base.zIndex, + }), + multiValueRemove: (base, state) => ({ + ...base, + color: state.isFocused ? base.color : "#333333", + }), + }; + + const componentProps = { + ...props, + styles, + defaultOptions: true, + value: selectedOptions, + className: "react-select", + classNamePrefix: "react-select", + noOptionsMessage: () => noOptionsMessage, + placeholder: isDisabled ? "" : placeholder, + components: { + ...components, + IndicatorSeparator: () => null, + ...((!showDropdown || isDisabled) && { DropdownIndicator: () => null }), + ...(isDisabled && { MultiValueRemove: () => null }), + }, + }; + + return creatable ? 
( + + ) : ( + + ); +}; + +export interface IFilterValueProps { + values?: T[]; + onSelect?: (item: T[]) => void; +} + +export interface IFilterProps { + noSelectionString?: string; + className?: string; + isMulti?: boolean; + isClearable?: boolean; + isDisabled?: boolean; + creatable?: boolean; + menuPortalTarget?: HTMLElement | null; +} + +export interface IFilterComponentProps extends IFilterProps { + loadOptions: (inputValue: string) => Promise[]>; + onCreate?: ( + name: string + ) => Promise<{ value: string; item: T; message: string }>; + getNamedObject: (id: string, name: string) => T; + isValidNewOption: (inputValue: string, options: T[]) => boolean; +} + +export const FilterSelectComponent = < + T extends IHasID, + IsMulti extends boolean +>( + props: IFilterValueProps & + IFilterComponentProps & + IFilterSelectProps +) => { + const { + values, + isMulti, + onSelect, + isValidNewOption, + getNamedObject, + loadOptions, + } = props; + const [loading, setLoading] = useState(false); + const Toast = useToast(); + + const selectedOptions = useMemo(() => { + if (isMulti && values) { + return values.map( + (value) => + ({ + object: value, + value: value.id, + } as Option) + ) as unknown as OnChangeValue, IsMulti>; + } + + if (values?.length) { + return { + object: values[0], + value: values[0].id, + } as OnChangeValue, IsMulti>; + } + }, [values, isMulti]); + + const onChange = (selectedItems: OnChangeValue, boolean>) => { + const selected = getSelectedItems(selectedItems); + + onSelect?.(selected.map((item) => item.object)); + }; + + const onCreate = async (name: string) => { + try { + setLoading(true); + const { value, item: newItem, message } = await props.onCreate!(name); + const newItemOption = { + object: newItem, + value, + } as Option; + if (!isMulti) { + onChange(newItemOption); + } else { + const o = (selectedOptions ?? 
[]) as Option[]; + onChange([...o, newItemOption]); + } + + setLoading(false); + Toast.success({ + content: ( + + {message}: {name} + + ), + }); + } catch (e) { + Toast.error(e); + } + }; + + const getNewOptionData = ( + inputValue: string, + optionLabel: React.ReactNode + ) => { + return { + value: "", + object: getNamedObject("", optionLabel as string), + }; + }; + + const validNewOption = ( + inputValue: string, + value: Options>, + options: OptionsOrGroups, GroupBase>> + ) => { + return isValidNewOption( + inputValue, + (options as Options>).map((o) => o.object) + ); + }; + + const debounceDelay = 100; + const debounceLoadOptions = useDebounce( + (inputValue, callback) => { + loadOptions(inputValue).then(callback); + }, + [loadOptions], + debounceDelay + ); + + return ( + + {...props} + loadOptions={debounceLoadOptions} + isLoading={props.isLoading || loading} + onChange={onChange} + selectedOptions={selectedOptions} + onCreateOption={props.creatable ? onCreate : undefined} + getNewOptionData={getNewOptionData} + isValidNewOption={validNewOption} + /> + ); +}; + +export interface IFilterIDProps { + ids?: string[]; + onSelect?: (item: T[]) => void; +} diff --git a/ui/v2.5/src/components/Shared/ScrapeDialog.tsx b/ui/v2.5/src/components/Shared/ScrapeDialog/ScrapeDialog.tsx similarity index 84% rename from ui/v2.5/src/components/Shared/ScrapeDialog.tsx rename to ui/v2.5/src/components/Shared/ScrapeDialog/ScrapeDialog.tsx index 425419ab031..8796aab0254 100644 --- a/ui/v2.5/src/components/Shared/ScrapeDialog.tsx +++ b/ui/v2.5/src/components/Shared/ScrapeDialog/ScrapeDialog.tsx @@ -8,10 +8,9 @@ import { FormControl, Badge, } from "react-bootstrap"; -import { CollapseButton } from "./CollapseButton"; -import { Icon } from "./Icon"; -import { ModalComponent } from "./Modal"; -import isEqual from "lodash-es/isEqual"; +import { CollapseButton } from "../CollapseButton"; +import { Icon } from "../Icon"; +import { ModalComponent } from "../Modal"; import clone from 
"lodash-es/clone"; import { FormattedMessage, useIntl } from "react-intl"; import { @@ -21,78 +20,10 @@ import { faTimes, } from "@fortawesome/free-solid-svg-icons"; import { getCountryByISO } from "src/utils/country"; -import { CountrySelect } from "./CountrySelect"; -import { StringListInput } from "./StringListInput"; -import { ImageSelector } from "./ImageSelector"; - -export class ScrapeResult { - public newValue?: T; - public originalValue?: T; - public scraped: boolean = false; - public useNewValue: boolean = false; - - public constructor( - originalValue?: T | null, - newValue?: T | null, - useNewValue?: boolean - ) { - this.originalValue = originalValue ?? undefined; - this.newValue = newValue ?? undefined; - // NOTE: this means that zero values are treated as null - // this is incorrect for numbers and booleans, but correct for strings - const hasNewValue = !!this.newValue; - - const valuesEqual = isEqual(originalValue, newValue); - this.useNewValue = useNewValue ?? (hasNewValue && !valuesEqual); - this.scraped = hasNewValue && !valuesEqual; - } - - public setOriginalValue(value?: T) { - this.originalValue = value; - this.newValue = value; - } - - public cloneWithValue(value?: T) { - const ret = clone(this); - - ret.newValue = value; - ret.useNewValue = !isEqual(ret.newValue, ret.originalValue); - - // #2691 - if we're setting the value, assume it should be treated as - // scraped - ret.scraped = true; - - return ret; - } - - public getNewValue() { - if (this.useNewValue) { - return this.newValue; - } - } -} - -// for types where !!value is a valid value (boolean and number) -export class ZeroableScrapeResult extends ScrapeResult { - public constructor( - originalValue?: T | null, - newValue?: T | null, - useNewValue?: boolean - ) { - super(originalValue, newValue, useNewValue); - - const hasNewValue = this.newValue !== undefined; - - const valuesEqual = isEqual(originalValue, newValue); - this.useNewValue = useNewValue ?? 
(hasNewValue && !valuesEqual); - this.scraped = hasNewValue && !valuesEqual; - } -} - -// eslint-disable-next-line @typescript-eslint/no-explicit-any -export function hasScrapedValues(values: ScrapeResult[]) { - return values.some((r) => r.scraped); -} +import { CountrySelect } from "../CountrySelect"; +import { StringListInput } from "../StringListInput"; +import { ImageSelector } from "../ImageSelector"; +import { ScrapeResult } from "./scrapeResult"; export interface IHasName { name: string | undefined; diff --git a/ui/v2.5/src/components/Shared/ScrapeDialog/ScrapedObjectsRow.tsx b/ui/v2.5/src/components/Shared/ScrapeDialog/ScrapedObjectsRow.tsx new file mode 100644 index 00000000000..606821c789d --- /dev/null +++ b/ui/v2.5/src/components/Shared/ScrapeDialog/ScrapedObjectsRow.tsx @@ -0,0 +1,269 @@ +import React, { useMemo } from "react"; +import * as GQL from "src/core/generated-graphql"; +import { + MovieSelect, + TagSelect, + StudioSelect, +} from "src/components/Shared/Select"; +import { + ScrapeDialogRow, + IHasName, +} from "src/components/Shared/ScrapeDialog/ScrapeDialog"; +import { PerformerSelect } from "src/components/Performers/PerformerSelect"; +import { ScrapeResult } from "src/components/Shared/ScrapeDialog/scrapeResult"; + +interface IScrapedStudioRow { + title: string; + result: ScrapeResult; + onChange: (value: ScrapeResult) => void; + newStudio?: GQL.ScrapedStudio; + onCreateNew?: (value: GQL.ScrapedStudio) => void; +} + +export const ScrapedStudioRow: React.FC = ({ + title, + result, + onChange, + newStudio, + onCreateNew, +}) => { + function renderScrapedStudio( + scrapeResult: ScrapeResult, + isNew?: boolean, + onChangeFn?: (value: string) => void + ) { + const resultValue = isNew + ? scrapeResult.newValue + : scrapeResult.originalValue; + const value = resultValue ? 
[resultValue] : []; + + return ( + { + if (onChangeFn) { + onChangeFn(items[0]?.id); + } + }} + ids={value} + /> + ); + } + + return ( + renderScrapedStudio(result)} + renderNewField={() => + renderScrapedStudio(result, true, (value) => + onChange(result.cloneWithValue(value)) + ) + } + onChange={onChange} + newValues={newStudio ? [newStudio] : undefined} + onCreateNew={() => { + if (onCreateNew && newStudio) onCreateNew(newStudio); + }} + /> + ); +}; + +interface IScrapedObjectsRow { + title: string; + result: ScrapeResult; + onChange: (value: ScrapeResult) => void; + newObjects?: T[]; + onCreateNew?: (value: T) => void; + renderObjects: ( + result: ScrapeResult, + isNew?: boolean, + onChange?: (value: R[]) => void + ) => JSX.Element; +} + +export const ScrapedObjectsRow = ( + props: IScrapedObjectsRow +) => { + const { title, result, onChange, newObjects, onCreateNew, renderObjects } = + props; + + return ( + renderObjects(result)} + renderNewField={() => + renderObjects(result, true, (value) => + onChange(result.cloneWithValue(value)) + ) + } + onChange={onChange} + newValues={newObjects} + onCreateNew={(i) => { + if (onCreateNew) onCreateNew(newObjects![i]); + }} + /> + ); +}; + +type IScrapedObjectRowImpl = Omit< + IScrapedObjectsRow, + "renderObjects" +>; + +export const ScrapedPerformersRow: React.FC< + IScrapedObjectRowImpl +> = ({ title, result, onChange, newObjects, onCreateNew }) => { + const performersCopy = useMemo(() => { + return ( + newObjects?.map((p) => { + const name: string = p.name ?? ""; + return { ...p, name }; + }) ?? [] + ); + }, [newObjects]); + + function renderScrapedPerformers( + scrapeResult: ScrapeResult, + isNew?: boolean, + onChangeFn?: (value: GQL.ScrapedPerformer[]) => void + ) { + const resultValue = isNew + ? scrapeResult.newValue + : scrapeResult.originalValue; + const value = resultValue ?? []; + + const selectValue = value.map((p) => { + const alias_list: string[] = []; + return { + id: p.stored_id ?? "", + name: p.name ?? 
"", + alias_list, + }; + }); + + return ( + { + if (onChangeFn) { + onChangeFn(items); + } + }} + values={selectValue} + /> + ); + } + + type PerformerType = GQL.ScrapedPerformer & { + name: string; + }; + + return ( + + title={title} + result={result} + renderObjects={renderScrapedPerformers} + onChange={onChange} + newObjects={performersCopy} + onCreateNew={onCreateNew} + /> + ); +}; + +export const ScrapedMoviesRow: React.FC< + IScrapedObjectRowImpl +> = ({ title, result, onChange, newObjects, onCreateNew }) => { + const moviesCopy = useMemo(() => { + return ( + newObjects?.map((p) => { + const name: string = p.name ?? ""; + return { ...p, name }; + }) ?? [] + ); + }, [newObjects]); + + type MovieType = GQL.ScrapedMovie & { + name: string; + }; + + function renderScrapedMovies( + scrapeResult: ScrapeResult, + isNew?: boolean, + onChangeFn?: (value: string[]) => void + ) { + const resultValue = isNew + ? scrapeResult.newValue + : scrapeResult.originalValue; + const value = resultValue ?? []; + + return ( + { + if (onChangeFn) { + onChangeFn(items.map((i) => i.id)); + } + }} + ids={value} + /> + ); + } + + return ( + + title={title} + result={result} + renderObjects={renderScrapedMovies} + onChange={onChange} + newObjects={moviesCopy} + onCreateNew={onCreateNew} + /> + ); +}; + +export const ScrapedTagsRow: React.FC< + IScrapedObjectRowImpl +> = ({ title, result, onChange, newObjects, onCreateNew }) => { + function renderScrapedTags( + scrapeResult: ScrapeResult, + isNew?: boolean, + onChangeFn?: (value: string[]) => void + ) { + const resultValue = isNew + ? scrapeResult.newValue + : scrapeResult.originalValue; + const value = resultValue ?? 
[]; + + return ( + { + if (onChangeFn) { + onChangeFn(items.map((i) => i.id)); + } + }} + ids={value} + /> + ); + } + + return ( + + title={title} + result={result} + renderObjects={renderScrapedTags} + onChange={onChange} + newObjects={newObjects} + onCreateNew={onCreateNew} + /> + ); +}; diff --git a/ui/v2.5/src/components/Shared/ScrapeDialog/createObjects.ts b/ui/v2.5/src/components/Shared/ScrapeDialog/createObjects.ts new file mode 100644 index 00000000000..89f62845d2c --- /dev/null +++ b/ui/v2.5/src/components/Shared/ScrapeDialog/createObjects.ts @@ -0,0 +1,192 @@ +import { useToast } from "src/hooks/Toast"; +import * as GQL from "src/core/generated-graphql"; +import { + useMovieCreate, + usePerformerCreate, + useStudioCreate, + useTagCreate, +} from "src/core/StashService"; +import { ScrapeResult } from "./scrapeResult"; +import { useIntl } from "react-intl"; +import { scrapedPerformerToCreateInput } from "src/core/performers"; +import { scrapedMovieToCreateInput } from "src/core/movies"; + +function useCreateObject( + entityTypeID: string, + createFunc: (o: T) => Promise +) { + const Toast = useToast(); + const intl = useIntl(); + + async function createNewObject(o: T) { + try { + await createFunc(o); + + Toast.success({ + content: intl.formatMessage( + { id: "toast.created_entity" }, + { + entity: intl + .formatMessage({ id: entityTypeID }) + .toLocaleLowerCase(), + } + ), + }); + } catch (e) { + Toast.error(e); + } + } + + return createNewObject; +} + +interface IUseCreateNewStudioProps { + scrapeResult: ScrapeResult; + setScrapeResult: (scrapeResult: ScrapeResult) => void; + setNewObject: (newObject: GQL.ScrapedStudio | undefined) => void; +} + +export function useCreateScrapedStudio(props: IUseCreateNewStudioProps) { + const [createStudio] = useStudioCreate(); + + const { scrapeResult, setScrapeResult, setNewObject } = props; + + async function createNewStudio(toCreate: GQL.ScrapedStudio) { + const result = await createStudio({ + variables: { + input: { 
+ name: toCreate.name, + url: toCreate.url, + }, + }, + }); + + // set the new studio as the value + setScrapeResult(scrapeResult.cloneWithValue(result.data!.studioCreate!.id)); + setNewObject(undefined); + } + + return useCreateObject("studio", createNewStudio); +} + +interface IUseCreateNewPerformerProps { + scrapeResult: ScrapeResult; + setScrapeResult: (scrapeResult: ScrapeResult) => void; + newObjects: GQL.ScrapedPerformer[]; + setNewObjects: (newObject: GQL.ScrapedPerformer[]) => void; +} + +export function useCreateScrapedPerformer(props: IUseCreateNewPerformerProps) { + const [createPerformer] = usePerformerCreate(); + + const { scrapeResult, setScrapeResult, newObjects, setNewObjects } = props; + + async function createNewPerformer(toCreate: GQL.ScrapedPerformer) { + const input = scrapedPerformerToCreateInput(toCreate); + + const result = await createPerformer({ + variables: { input }, + }); + + const newValue = [...(scrapeResult.newValue ?? [])]; + if (result.data?.performerCreate) + newValue.push({ + stored_id: result.data.performerCreate.id, + name: result.data.performerCreate.name, + }); + + // add the new performer to the new performers value + const performerClone = scrapeResult.cloneWithValue(newValue); + setScrapeResult(performerClone); + + // remove the performer from the list + const newPerformersClone = newObjects.concat(); + const pIndex = newPerformersClone.findIndex( + (p) => p.name === toCreate.name + ); + if (pIndex === -1) throw new Error("Could not find performer to remove"); + + newPerformersClone.splice(pIndex, 1); + + setNewObjects(newPerformersClone); + } + + return useCreateObject("performer", createNewPerformer); +} + +interface IUseCreateNewObjectIDListProps< + T extends { name?: string | undefined | null } +> { + scrapeResult: ScrapeResult; + setScrapeResult: (scrapeResult: ScrapeResult) => void; + newObjects: T[]; + setNewObjects: (newObject: T[]) => void; +} + +function useCreateNewObjectIDList< + T extends { name?: string | 
undefined | null } +>( + entityTypeID: string, + props: IUseCreateNewObjectIDListProps, + createObject: (toCreate: T) => Promise +) { + const { scrapeResult, setScrapeResult, newObjects, setNewObjects } = props; + + async function createNewObject(toCreate: T) { + const newID = await createObject(toCreate); + + // add the new object to the new objects value + const newResult = scrapeResult.cloneWithValue(scrapeResult.newValue); + if (!newResult.newValue) { + newResult.newValue = []; + } + newResult.newValue.push(newID); + setScrapeResult(newResult); + + // remove the object from the list + const newObjectsClone = newObjects.concat(); + const pIndex = newObjectsClone.findIndex((p) => p.name === toCreate.name); + if (pIndex === -1) throw new Error("Could not find object to remove"); + newObjectsClone.splice(pIndex, 1); + + setNewObjects(newObjectsClone); + } + + return useCreateObject(entityTypeID, createNewObject); +} + +export function useCreateScrapedMovie( + props: IUseCreateNewObjectIDListProps +) { + const [createMovie] = useMovieCreate(); + + async function createNewMovie(toCreate: GQL.ScrapedMovie) { + const movieInput = scrapedMovieToCreateInput(toCreate); + const result = await createMovie({ + variables: { input: movieInput }, + }); + + return result.data?.movieCreate?.id ?? ""; + } + + return useCreateNewObjectIDList("movie", props, createNewMovie); +} + +export function useCreateScrapedTag( + props: IUseCreateNewObjectIDListProps +) { + const [createTag] = useTagCreate(); + + async function createNewTag(toCreate: GQL.ScrapedTag) { + const tagInput: GQL.TagCreateInput = { name: toCreate.name ?? "" }; + const result = await createTag({ + variables: { + input: tagInput, + }, + }); + + return result.data?.tagCreate?.id ?? 
""; + } + + return useCreateNewObjectIDList("tag", props, createNewTag); +} diff --git a/ui/v2.5/src/components/Shared/ScrapeDialog/scrapeResult.ts b/ui/v2.5/src/components/Shared/ScrapeDialog/scrapeResult.ts new file mode 100644 index 00000000000..a8ee60e1ee9 --- /dev/null +++ b/ui/v2.5/src/components/Shared/ScrapeDialog/scrapeResult.ts @@ -0,0 +1,71 @@ +import isEqual from "lodash-es/isEqual"; +import clone from "lodash-es/clone"; + +export class ScrapeResult { + public newValue?: T; + public originalValue?: T; + public scraped: boolean = false; + public useNewValue: boolean = false; + + public constructor( + originalValue?: T | null, + newValue?: T | null, + useNewValue?: boolean + ) { + this.originalValue = originalValue ?? undefined; + this.newValue = newValue ?? undefined; + // NOTE: this means that zero values are treated as null + // this is incorrect for numbers and booleans, but correct for strings + const hasNewValue = !!this.newValue; + + const valuesEqual = isEqual(originalValue, newValue); + this.useNewValue = useNewValue ?? 
(hasNewValue && !valuesEqual); + this.scraped = hasNewValue && !valuesEqual; + } + + public setOriginalValue(value?: T) { + this.originalValue = value; + this.newValue = value; + } + + public cloneWithValue(value?: T) { + const ret = clone(this); + + ret.newValue = value; + ret.useNewValue = !isEqual(ret.newValue, ret.originalValue); + + // #2691 - if we're setting the value, assume it should be treated as + // scraped + ret.scraped = true; + + return ret; + } + + public getNewValue() { + if (this.useNewValue) { + return this.newValue; + } + } +} + +// for types where !!value is a valid value (boolean and number) +export class ZeroableScrapeResult extends ScrapeResult { + public constructor( + originalValue?: T | null, + newValue?: T | null, + useNewValue?: boolean + ) { + super(originalValue, newValue, useNewValue); + + const hasNewValue = this.newValue !== undefined; + + const valuesEqual = isEqual(originalValue, newValue); + this.useNewValue = useNewValue ?? (hasNewValue && !valuesEqual); + this.scraped = hasNewValue && !valuesEqual; + } +} + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +export function hasScrapedValues(values: ScrapeResult[]) { + return values.some((r) => r.scraped); +} diff --git a/ui/v2.5/src/components/Shared/Select.tsx b/ui/v2.5/src/components/Shared/Select.tsx index 6ae86b05cbe..495df4f5e2d 100644 --- a/ui/v2.5/src/components/Shared/Select.tsx +++ b/ui/v2.5/src/components/Shared/Select.tsx @@ -16,11 +16,9 @@ import { useAllTagsForFilter, useAllMoviesForFilter, useAllStudiosForFilter, - useAllPerformersForFilter, useMarkerStrings, useTagCreate, useStudioCreate, - usePerformerCreate, useMovieCreate, } from "src/core/StashService"; import { useToast } from "src/hooks/Toast"; @@ -33,6 +31,7 @@ import { TagPopover } from "../Tags/TagPopover"; import { defaultMaxOptionsShown, IUIConfig } from "src/core/config"; import { useDebouncedSetState } from "src/hooks/debounce"; import { Placement } from "react-bootstrap/esm/Overlay"; 
+import { PerformerIDSelect } from "../Performers/PerformerSelect"; export type SelectObject = { id: string; @@ -45,12 +44,9 @@ interface ITypeProps { type?: | "performers" | "studios" - | "parent_studios" | "tags" - | "sceneTags" - | "performerTags" - | "parentTags" - | "childTags" + | "scene_tags" + | "performer_tags" | "movies"; } interface IFilterProps { @@ -533,152 +529,7 @@ export const MarkerTitleSuggest: React.FC = (props) => { }; export const PerformerSelect: React.FC = (props) => { - const [performerAliases, setPerformerAliases] = useState< - Record - >({}); - const [performerDisambiguations, setPerformerDisambiguations] = useState< - Record - >({}); - const [allAliases, setAllAliases] = useState([]); - const { data, loading } = useAllPerformersForFilter(); - const [createPerformer] = usePerformerCreate(); - - const { configuration } = React.useContext(ConfigurationContext); - const intl = useIntl(); - const defaultCreatable = - !configuration?.interface.disableDropdownCreate.performer ?? true; - - const performers = useMemo( - () => data?.allPerformers ?? 
[], - [data?.allPerformers] - ); - - useEffect(() => { - // build the tag aliases map - const newAliases: Record = {}; - const newDisambiguations: Record = {}; - const newAll: string[] = []; - performers.forEach((t) => { - if (t.alias_list.length) { - newAliases[t.id] = t.alias_list; - } - newAll.push(...t.alias_list); - if (t.disambiguation) { - newDisambiguations[t.id] = t.disambiguation; - } - }); - setPerformerAliases(newAliases); - setAllAliases(newAll); - setPerformerDisambiguations(newDisambiguations); - }, [performers]); - - const PerformerOption: React.FC> = ( - optionProps - ) => { - const { inputValue } = optionProps.selectProps; - - let thisOptionProps = optionProps; - - let { label } = optionProps.data; - const id = Number(optionProps.data.value); - - if (id && performerDisambiguations[id]) { - label += ` (${performerDisambiguations[id]})`; - } - - if ( - inputValue && - !optionProps.label.toLowerCase().includes(inputValue.toLowerCase()) - ) { - // must be alias - label += " (alias)"; - } - - if (label != optionProps.data.label) { - thisOptionProps = { - ...optionProps, - children: label, - }; - } - - return ; - }; - - const filterOption = (option: Option, rawInput: string): boolean => { - if (!rawInput) { - return true; - } - - const input = rawInput.toLowerCase(); - const optionVal = option.label.toLowerCase(); - - if (optionVal.includes(input)) { - return true; - } - - // search for performer aliases - const aliases = performerAliases[option.value]; - return aliases && aliases.some((a) => a.toLowerCase().includes(input)); - }; - - const isValidNewOption = ( - inputValue: string, - value: Options
+ + + } + checked={config.markSceneAsOrganizedOnSave} + onChange={(e: React.ChangeEvent) => + setConfig({ + ...config, + markSceneAsOrganizedOnSave: e.currentTarget.checked, + }) + } + /> + + + +
diff --git a/ui/v2.5/src/components/Tagger/scenes/PerformerResult.tsx b/ui/v2.5/src/components/Tagger/scenes/PerformerResult.tsx index b17f7aedcbf..19f072c4418 100755 --- a/ui/v2.5/src/components/Tagger/scenes/PerformerResult.tsx +++ b/ui/v2.5/src/components/Tagger/scenes/PerformerResult.tsx @@ -1,4 +1,4 @@ -import React from "react"; +import React, { useEffect, useState } from "react"; import { Button, ButtonGroup } from "react-bootstrap"; import { FormattedMessage } from "react-intl"; import cx from "classnames"; @@ -6,9 +6,12 @@ import cx from "classnames"; import * as GQL from "src/core/generated-graphql"; import { Icon } from "src/components/Shared/Icon"; import { OperationButton } from "src/components/Shared/OperationButton"; -import { PerformerSelect, SelectObject } from "src/components/Shared/Select"; import { OptionalField } from "../IncludeButton"; import { faSave } from "@fortawesome/free-solid-svg-icons"; +import { + Performer, + PerformerSelect, +} from "src/components/Performers/PerformerSelect"; interface IPerformerResultProps { performer: GQL.ScrapedPerformer; @@ -40,10 +43,25 @@ const PerformerResult: React.FC = ({ stashID.stash_id === performer.remote_site_id ); - const handlePerformerSelect = (performers: SelectObject[]) => { + const [selectedPerformer, setSelectedPerformer] = useState< + Performer | undefined + >(); + + useEffect(() => { + if ( + performerData?.findPerformer && + selectedID === performerData?.findPerformer?.id + ) { + setSelectedPerformer(performerData.findPerformer); + } + }, [performerData?.findPerformer, selectedID]); + + const handlePerformerSelect = (performers: Performer[]) => { if (performers.length) { + setSelectedPerformer(performers[0]); setSelectedID(performers[0].id); } else { + setSelectedPerformer(undefined); setSelectedID(undefined); } }; @@ -114,7 +132,7 @@ const PerformerResult: React.FC = ({ = ({ tag, tabKey }) => { const uiConfig = configuration?.ui as IUIConfig | undefined; const abbreviateCounter = 
uiConfig?.abbreviateCounters ?? false; const enableBackgroundImage = uiConfig?.enableTagBackgroundImage ?? false; - const showAllDetails = uiConfig?.showAllDetails ?? false; + const showAllDetails = uiConfig?.showAllDetails ?? true; const compactExpandedDetails = uiConfig?.compactExpandedDetails ?? false; const [collapsed, setCollapsed] = useState(!showAllDetails); @@ -107,11 +108,32 @@ const TagPage: React.FC = ({ tag, tabKey }) => { const performerCount = (showAllCounts ? tag.performer_count_all : tag.performer_count) ?? 0; + const populatedDefaultTab = useMemo(() => { + let ret: TabKey = "scenes"; + if (sceneCount == 0) { + if (imageCount != 0) { + ret = "images"; + } else if (galleryCount != 0) { + ret = "galleries"; + } else if (sceneMarkerCount != 0) { + ret = "markers"; + } else if (performerCount != 0) { + ret = "performers"; + } + } + + return ret; + }, [sceneCount, imageCount, galleryCount, sceneMarkerCount, performerCount]); + + if (tabKey === defaultTab) { + tabKey = populatedDefaultTab; + } + function setTabKey(newTabKey: string | null) { - if (!newTabKey) newTabKey = defaultTab; + if (!newTabKey || newTabKey === defaultTab) newTabKey = populatedDefaultTab; if (newTabKey === tabKey) return; - if (newTabKey === defaultTab) { + if (newTabKey === populatedDefaultTab) { history.replace(`/tags/${tag.id}`); } else if (isTabKey(newTabKey)) { history.replace(`/tags/${tag.id}/${newTabKey}`); diff --git a/ui/v2.5/src/core/StashService.ts b/ui/v2.5/src/core/StashService.ts index c6cc823c0a9..c7270e2e23d 100644 --- a/ui/v2.5/src/core/StashService.ts +++ b/ui/v2.5/src/core/StashService.ts @@ -268,8 +268,22 @@ export const queryFindPerformers = (filter: ListFilterModel) => }, }); -export const useAllPerformersForFilter = () => - GQL.useAllPerformersForFilterQuery(); +export const queryFindPerformersByIDForSelect = (performerIDs: number[]) => + client.query({ + query: GQL.FindPerformersForSelectDocument, + variables: { + performer_ids: performerIDs, + }, + }); + 
+export const queryFindPerformersForSelect = (filter: ListFilterModel) => + client.query({ + query: GQL.FindPerformersForSelectDocument, + variables: { + filter: filter.makeFindFilter(), + performer_filter: filter.makeFilter(), + }, + }); export const useFindStudio = (id: string) => { const skip = id === "new" || id === ""; @@ -1372,8 +1386,6 @@ export const usePerformerCreate = () => const performer = result.data?.performerCreate; if (!performer) return; - appendObject(cache, performer, GQL.AllPerformersForFilterDocument); - // update stats updateStats(cache, "performer_count", 1); diff --git a/ui/v2.5/src/core/config.ts b/ui/v2.5/src/core/config.ts index 6988e1ba4b6..854ed78c54a 100644 --- a/ui/v2.5/src/core/config.ts +++ b/ui/v2.5/src/core/config.ts @@ -55,8 +55,10 @@ export interface IUIConfig { compactExpandedDetails?: boolean; // if true show all content details by default showAllDetails?: boolean; + // if true the chromecast option will enabled enableChromecast?: boolean; + // if true continue scene will always play from the beginning alwaysStartFromBeginning?: boolean; // if true enable activity tracking @@ -65,6 +67,8 @@ export interface IUIConfig { // before the play count is incremented minimumPlayPercent?: number; + showAbLoopControls?: boolean; + // maximum number of items to shown in the dropdown list - defaults to 200 // upper limit of 1000 maxOptionsShown?: number; diff --git a/ui/v2.5/src/docs/en/Changelog/v0220.md b/ui/v2.5/src/docs/en/Changelog/v0220.md index ea2a7d96739..86d2ecb58d7 100644 --- a/ui/v2.5/src/docs/en/Changelog/v0220.md +++ b/ui/v2.5/src/docs/en/Changelog/v0220.md @@ -20,6 +20,11 @@ * Added support for `-v/--version` command line flag. ([#3883](https://github.com/stashapp/stash/pull/3883)) ### 🐛 Bug fixes +* **[0.22.1]** Fixed Batch Update Performers not working correctly. ([#4024](https://github.com/stashapp/stash/pull/4024)) +* **[0.22.1]** Fixed panic when creating Studios during Identify task. 
([#4024](https://github.com/stashapp/stash/pull/4024)) +* **[0.22.1]** Added explicit option to store blobs in database at setup, and fixed default blobs path. ([#4038](https://github.com/stashapp/stash/pull/4038)) +* **[0.22.1]** Fixed dropdown appearing beneath other controls on the Performer and Tag pages. ([#4039](https://github.com/stashapp/stash/pull/4039)) +* **[0.22.1]** Fixed buttons moving around when setting marker time when creating a new marker. ([#4040](https://github.com/stashapp/stash/pull/4040)) * Fixing sorting of performer tags. ([#4018](https://github.com/stashapp/stash/pull/4018)) * Fixed scene URLs being cleared when merging scenes. ([#4005](https://github.com/stashapp/stash/pull/4005)) * Fixed setting the Create Missing flag in the Identify dialog not working. ([#4008](https://github.com/stashapp/stash/pull/4008)) diff --git a/ui/v2.5/src/docs/en/Manual/KeyboardShortcuts.md b/ui/v2.5/src/docs/en/Manual/KeyboardShortcuts.md index c496e8b088e..cd3586a10cd 100644 --- a/ui/v2.5/src/docs/en/Manual/KeyboardShortcuts.md +++ b/ui/v2.5/src/docs/en/Manual/KeyboardShortcuts.md @@ -78,7 +78,8 @@ | `↑` | Increase volume 10% | | `↓` | Decrease volume 10% | | `m` | Toggle mute | -| `Shift + l` | Toggle player looping | +| `l` | A/B looping toggle. Press once to set start point. Press again to set end point. Press again to disable loop. 
| +| `Shift + l` | Toggle looping of scene when it's over | ### Scene Markers tab shortcuts diff --git a/ui/v2.5/src/hooks/sprite.ts b/ui/v2.5/src/hooks/sprite.ts new file mode 100644 index 00000000000..8d66c2fa568 --- /dev/null +++ b/ui/v2.5/src/hooks/sprite.ts @@ -0,0 +1,62 @@ +import { useEffect, useState } from "react"; +import { WebVTT } from "videojs-vtt.js"; + +export interface ISceneSpriteInfo { + url: string; + start: number; + end: number; + x: number; + y: number; + w: number; + h: number; +} + +function getSpriteInfo(vttPath: string, response: string) { + const sprites: ISceneSpriteInfo[] = []; + + const parser = new WebVTT.Parser(window, WebVTT.StringDecoder()); + parser.oncue = (cue: VTTCue) => { + const match = cue.text.match(/^([^#]*)#xywh=(\d+),(\d+),(\d+),(\d+)$/i); + if (!match) return; + + sprites.push({ + url: new URL(match[1], vttPath).href, + start: cue.startTime, + end: cue.endTime, + x: Number(match[2]), + y: Number(match[3]), + w: Number(match[4]), + h: Number(match[5]), + }); + }; + parser.parse(response); + parser.flush(); + + return sprites; +} + +export function useSpriteInfo(vttPath: string | undefined) { + const [spriteInfo, setSpriteInfo] = useState< + ISceneSpriteInfo[] | undefined + >(); + + useEffect(() => { + if (!vttPath) { + setSpriteInfo(undefined); + return; + } + + fetch(vttPath).then((response) => { + if (!response.ok) { + setSpriteInfo(undefined); + return; + } + + response.text().then((text) => { + setSpriteInfo(getSpriteInfo(vttPath, text)); + }); + }); + }, [vttPath]); + + return spriteInfo; +} diff --git a/ui/v2.5/src/index.scss b/ui/v2.5/src/index.scss index bb278866223..54ab41ec991 100755 --- a/ui/v2.5/src/index.scss +++ b/ui/v2.5/src/index.scss @@ -739,8 +739,9 @@ div.dropdown-menu { color: $dark-text; font-size: 12px; line-height: 1rem; + margin-right: -0.5rem; opacity: 0.5; - padding: 0 0 0 0.5rem; + padding: 0 0.5rem; position: relative; &:active, diff --git a/ui/v2.5/src/locales/da-DK.json 
b/ui/v2.5/src/locales/da-DK.json index b83b8b48004..d3baeaeb4d9 100644 --- a/ui/v2.5/src/locales/da-DK.json +++ b/ui/v2.5/src/locales/da-DK.json @@ -877,7 +877,7 @@ "path": "Sti", "perceptual_similarity": "Perceptuel lighed (phash)", "performer": "Kunstner", - "performerTags": "Kunstner Tags", + "performer_tags": "Kunstner Tags", "performer_age": "kunstnere Alder", "performer_count": "Kunstner Antal", "performer_favorite": "Foretrukken optrædende", @@ -927,7 +927,7 @@ "resolution": "Opløsning", "scene": "Scene", "sceneTagger": "Scenetagger", - "sceneTags": "Scene-etiketter", + "scene_tags": "Scene-etiketter", "scene_count": "Scene antal", "scene_id": "Scene-id", "scenes": "Scener", diff --git a/ui/v2.5/src/locales/de-DE.json b/ui/v2.5/src/locales/de-DE.json index 0fae66ce2c4..5792d627fc2 100644 --- a/ui/v2.5/src/locales/de-DE.json +++ b/ui/v2.5/src/locales/de-DE.json @@ -1,1267 +1,1266 @@ { - "actions": { - "add": "Hinzufügen", - "add_directory": "Ordner hinzufügen", - "add_entity": "Füge {entityType} hinzu", - "add_to_entity": "Hinzufügen zu {entityType}", - "allow": "Erlauben", - "allow_temporarily": "Vorübergehend erlauben", - "anonymise": "Anonymisieren", - "apply": "Übernehmen", - "auto_tag": "Auto-Tag", - "backup": "Backup", - "browse_for_image": "Nach Bild suchen…", - "cancel": "Abbrechen", - "clean": "Aufräumen", - "clear": "Leeren", - "clear_back_image": "Rückseite entfernen", - "clear_front_image": "Vorderseite entfernen", - "clear_image": "Bild entfernen", - "close": "Schließen", - "confirm": "Bestätigen", - "continue": "Fortsetzen", - "create": "Erstellen", - "create_chapters": "Kapitel erstellen", - "create_entity": "Erstelle {entityType}", - "create_marker": "Erstelle Markierung", - "created_entity": "{entity_type} erstellt: {entity_name}", - "customise": "Anpassen", - "delete": "Löschen", - "delete_entity": "Lösche {entityType}", - "delete_file": "Lösche Datei", - "delete_file_and_funscript": "Datei löschen (inkl. 
funscript)", - "delete_generated_supporting_files": "Lösche generierte Hilfsdaten", - "delete_stashid": "StashID löschen", - "disallow": "Nicht erlauben", - "download": "Herunterladen", - "download_anonymised": "Anonymisiert herunterladen", - "download_backup": "Lade Backup herunter", - "edit": "Bearbeiten", - "edit_entity": "Bearbeiten {entityType}", - "export": "Exportieren", - "export_all": "Alle exportieren…", - "find": "Suchen", - "finish": "Fertig", - "from_file": "Aus Datei…", - "from_url": "Von URL…", - "full_export": "Vollständiger Export", - "full_import": "Vollständiger Import", - "generate": "Generieren", - "generate_thumb_default": "Erstelle voreingestelltes Vorschaubild", - "generate_thumb_from_current": "Erstelle Vorschaubild vom Gegenwärtigen", - "hash_migration": "Hash Umwandlung", - "hide": "Verstecke", - "hide_configuration": "Konfiguration ausblenden", - "identify": "Identifizieren", - "ignore": "Ignorieren", - "import": "Importieren…", - "import_from_file": "Importieren aus Datei", - "logout": "Ausloggen", - "make_primary": "Als Primärquelle festlegen", - "merge": "Zusammenführen", - "merge_from": "Zusammenführen aus", - "merge_into": "Zusammenführen in", - "migrate_blobs": "Blobs migrieren", - "migrate_scene_screenshots": "Szenen-Screenshots migrieren", - "next_action": "Nächste", - "not_running": "wird nicht ausgeführt", - "open_in_external_player": "In externem Player öffnen", - "open_random": "Öffne Zufällig", - "overwrite": "Überschreiben", - "play_random": "Zufällige Wiedergabe", - "play_selected": "Spiele ausgewählte", - "preview": "Vorschau", - "previous_action": "Zurück", - "reassign": "Neu zuordnen", - "refresh": "Aktualisieren", - "reload_plugins": "Plugins neu laden", - "reload_scrapers": "Scraper neu laden", - "remove": "Entfernen", - "remove_from_gallery": "Aus Gallerie entfernen", - "rename_gen_files": "Hilfsdaten umbenennen", - "rescan": "Erneut scannen", - "reshuffle": "Neu mischen", - "running": "wird ausgeführt", - "save": 
"Speichern", - "save_delete_settings": "Verwende Option standardmäßig beim Löschen", - "save_filter": "Filter speichern", - "scan": "Scannen", - "scrape": "Scrapen", - "scrape_query": "Scrape Anfrage", - "scrape_scene_fragment": "An Bruchstück scrapen", - "scrape_with": "Scrape mit…", - "search": "Suchen", - "select_all": "Alle auswählen", - "select_entity": "{entityType} auswählen", - "select_folders": "Ordner auswählen", - "select_none": "Nichts auswählen", - "selective_auto_tag": "Automatisch selektiv taggen", - "selective_clean": "Selektive Reinigung", - "selective_scan": "Selektiv scannen", - "set_as_default": "Als Voreinstellung festlegen", - "set_back_image": "Rückseite…", - "set_front_image": "Vorderseite…", - "set_image": "Bild festlegen…", - "show": "Anzeigen", - "show_configuration": "Konfiguration anzeigen", - "skip": "Überspringen", - "split": "Trennen", - "stop": "Stopp", - "submit": "Einreichen", - "submit_stash_box": "Zu Stash-Box übermitteln", - "submit_update": "Aktualisierung übermitteln", - "swap": "Tauschen", - "tasks": { - "clean_confirm_message": "Wollen Sie wirklich die Datenbank aufräumen? Dies wird alle Informationen und Hilfsdaten für Szenen und Galerien löschen, die nicht mehr auf dem Dateisystem vorhanden sind.", - "dry_mode_selected": "Trockenmodus ausgewählt. Es findet keine Löschung der Daten statt, lediglich Protokollierung.", - "import_warning": "Wollen Sie wirklich die Datenbank importieren? Dies wird die aktuelle Datenbank mit der importierten Datenbank überschreiben." 
- }, - "temp_disable": "Vorübergehend deaktivieren…", - "temp_enable": "Vorübergehend aktivieren…", - "unset": "Aufheben", - "use_default": "Standard verwenden", - "view_random": "Zeige Zufällige" - }, - "actions_name": "Aktionen", - "age": "Alter", - "aliases": "Aliase", - "all": "Alle", - "also_known_as": "Auch bekannt unter", - "appears_with": "Tritt auf mit", - "ascending": "Aufsteigend", - "average_resolution": "Durchschnittliche Auflösung", - "between_and": "und", - "birth_year": "Geburtsjahr", - "birthdate": "Geburtsdatum", - "bitrate": "Bitrate", - "blobs_storage_type": { - "database": "Datenbank", - "filesystem": "Dateisystem" - }, - "captions": "Untertitel", - "career_length": "Karrierelänge", - "chapters": "Kapitel", - "circumcised": "Beschnitten", - "circumcised_types": { - "CUT": "Beschnitten", - "UNCUT": "Unbeschnitten" - }, - "component_tagger": { - "config": { - "active_instance": "Aktive stash-box Instanz:", - "blacklist_desc": "Auf der Blacklist befindliche Objekte sind von Anfragen ausgenommen. Objekte sind reguläre Ausdrücke und Groß-/Kleinschreibung wird nicht beachtet. Manchen Zeichen muss ein Fluchtsymbol (Backslash) vorangestellt werden: {chars_require_escape}", - "blacklist_label": "Schwarze Liste", - "query_mode_auto": "Automatisch", - "query_mode_auto_desc": "Nutzt Metadaten sofern verfügbar bzw. 
Dateinamen", - "query_mode_dir": "Verzeichnis", - "query_mode_dir_desc": "Nutzt nur den übergeordneten Ordner", - "query_mode_filename": "Dateiname", - "query_mode_filename_desc": "Nutzt nur den Dateinamen", - "query_mode_label": "Suchmodus", - "query_mode_metadata": "Metadaten", - "query_mode_metadata_desc": "Nutzt nur Metadaten", - "query_mode_path": "Pfad", - "query_mode_path_desc": "Nutzt vollständigen Dateipfad", - "set_cover_desc": "Überschreibe Titelbild sofern verfügbar.", - "set_cover_label": "Setze Cover-Bild", - "set_tag_desc": "Hänge Tags der Szene an, entweder durch Überschreiben oder Zusammenführen mit bereits angehängten Tags.", - "set_tag_label": "Tags anhängen", - "show_male_desc": "Auswahl ob männliche Darsteller der Szene hinzugefügt werden können.", - "show_male_label": "Männliche Darsteller anzeigen", - "source": "Quelle" - }, - "noun_query": "Anfrage", - "results": { - "duration_off": "Laufzeitunterschied bei mindestens {number}sek", - "duration_unknown": "Laufzeit unbekannt", - "fp_found": "{fpCount, plural, =0 {Keine neuen Fingerabdruckübereinstimmungen gefunden} other {# neue Fingerabdruckübereinstimmungen gefunden}}", - "fp_matches": "Übereinstimmung der Laufzeit", - "fp_matches_multi": "Laufzeit stimmt mit {matchCount}/{durationsLength} Fingerabdrücken überein", - "hash_matches": "Übereinstimmung bei {hash_type}", - "match_failed_already_tagged": "Szene bereits getagged", - "match_failed_no_result": "Keine Übereinstimmungen gefunden", - "match_success": "Szene erfolgreich getagged", - "phash_matches": "{count} PHashes übereinstimmung", - "unnamed": "Unbenannt" - }, - "verb_match_fp": "Fingerabdrücke zuordnen", - "verb_matched": "zugeordnet", - "verb_scrape_all": "Alles Scrapen", - "verb_submit_fp": "Übermittele {fpCount, plural, one{# Fingerabdruck} other{# Fingerabdrücke}}", - "verb_toggle_config": "{toggle} {configuration}", - "verb_toggle_unmatched": "{toggle} nicht zugeordnete Szenen" - }, - "config": { - "about": { - "build_hash": 
"Hash des Builds:", - "build_time": "Zeitpunkt des Builds:", - "check_for_new_version": "Suche nach Updates", - "latest_version": "Aktuellste Version", - "latest_version_build_hash": "Neuester Build Hash:", - "new_version_notice": "[NEU]", - "release_date": "Veröffentlichungsdatum:", - "stash_discord": "Komm in unseren {url} Kanal", - "stash_home": "Stash ist beheimatet auf {url}", - "stash_open_collective": "Unterstütze uns über {url}", - "stash_wiki": "Stash {url} Seite", - "version": "Version" + "actions": { + "add": "Hinzufügen", + "add_directory": "Ordner hinzufügen", + "add_entity": "Füge {entityType} hinzu", + "add_to_entity": "Hinzufügen zu {entityType}", + "allow": "Erlauben", + "allow_temporarily": "Vorübergehend erlauben", + "anonymise": "Anonymisieren", + "apply": "Übernehmen", + "auto_tag": "Auto-Tag", + "backup": "Backup", + "browse_for_image": "Nach Bild suchen…", + "cancel": "Abbrechen", + "clean": "Aufräumen", + "clear": "Leeren", + "clear_back_image": "Rückseite entfernen", + "clear_front_image": "Vorderseite entfernen", + "clear_image": "Bild entfernen", + "close": "Schließen", + "confirm": "Bestätigen", + "continue": "Fortsetzen", + "create": "Erstellen", + "create_chapters": "Kapitel erstellen", + "create_entity": "Erstelle {entityType}", + "create_marker": "Erstelle Markierung", + "created_entity": "{entity_type} erstellt: {entity_name}", + "customise": "Anpassen", + "delete": "Löschen", + "delete_entity": "Lösche {entityType}", + "delete_file": "Lösche Datei", + "delete_file_and_funscript": "Datei löschen (inkl. 
funscript)", + "delete_generated_supporting_files": "Lösche generierte Hilfsdaten", + "delete_stashid": "StashID löschen", + "disallow": "Nicht erlauben", + "download": "Herunterladen", + "download_anonymised": "Anonymisiert herunterladen", + "download_backup": "Lade Backup herunter", + "edit": "Bearbeiten", + "edit_entity": "Bearbeiten {entityType}", + "export": "Exportieren", + "export_all": "Alle exportieren…", + "find": "Suchen", + "finish": "Fertig", + "from_file": "Aus Datei…", + "from_url": "Von URL…", + "full_export": "Vollständiger Export", + "full_import": "Vollständiger Import", + "generate": "Generieren", + "generate_thumb_default": "Erstelle voreingestelltes Vorschaubild", + "generate_thumb_from_current": "Erstelle Vorschaubild vom Gegenwärtigen", + "hash_migration": "Hash Umwandlung", + "hide": "Verstecke", + "hide_configuration": "Konfiguration ausblenden", + "identify": "Identifizieren", + "ignore": "Ignorieren", + "import": "Importieren…", + "import_from_file": "Importieren aus Datei", + "logout": "Ausloggen", + "make_primary": "Als Primärquelle festlegen", + "merge": "Zusammenführen", + "merge_from": "Zusammenführen aus", + "merge_into": "Zusammenführen in", + "migrate_blobs": "Blobs migrieren", + "migrate_scene_screenshots": "Szenen-Screenshots migrieren", + "next_action": "Nächste", + "not_running": "wird nicht ausgeführt", + "open_in_external_player": "In externem Player öffnen", + "open_random": "Öffne Zufällig", + "overwrite": "Überschreiben", + "play_random": "Zufällige Wiedergabe", + "play_selected": "Spiele ausgewählte", + "preview": "Vorschau", + "previous_action": "Zurück", + "reassign": "Neu zuordnen", + "refresh": "Aktualisieren", + "reload_plugins": "Plugins neu laden", + "reload_scrapers": "Scraper neu laden", + "remove": "Entfernen", + "remove_from_gallery": "Aus Gallerie entfernen", + "rename_gen_files": "Hilfsdaten umbenennen", + "rescan": "Erneut scannen", + "reshuffle": "Neu mischen", + "running": "wird ausgeführt", + "save": 
"Speichern", + "save_delete_settings": "Verwende Option standardmäßig beim Löschen", + "save_filter": "Filter speichern", + "scan": "Scannen", + "scrape": "Scrapen", + "scrape_query": "Scrape Anfrage", + "scrape_scene_fragment": "An Bruchstück scrapen", + "scrape_with": "Scrape mit…", + "search": "Suchen", + "select_all": "Alle auswählen", + "select_entity": "{entityType} auswählen", + "select_folders": "Ordner auswählen", + "select_none": "Nichts auswählen", + "selective_auto_tag": "Automatisch selektiv taggen", + "selective_clean": "Selektive Reinigung", + "selective_scan": "Selektiv scannen", + "set_as_default": "Als Voreinstellung festlegen", + "set_back_image": "Rückseite…", + "set_front_image": "Vorderseite…", + "set_image": "Bild festlegen…", + "show": "Anzeigen", + "show_configuration": "Konfiguration anzeigen", + "skip": "Überspringen", + "split": "Trennen", + "stop": "Stopp", + "submit": "Einreichen", + "submit_stash_box": "Zu Stash-Box übermitteln", + "submit_update": "Aktualisierung übermitteln", + "swap": "Tauschen", + "tasks": { + "clean_confirm_message": "Wollen Sie wirklich die Datenbank aufräumen? Dies wird alle Informationen und Hilfsdaten für Szenen und Galerien löschen, die nicht mehr auf dem Dateisystem vorhanden sind.", + "dry_mode_selected": "Trockenmodus ausgewählt. Es findet keine Löschung der Daten statt, lediglich Protokollierung.", + "import_warning": "Wollen Sie wirklich die Datenbank importieren? Dies wird die aktuelle Datenbank mit der importierten Datenbank überschreiben." 
+ }, + "temp_disable": "Vorübergehend deaktivieren…", + "temp_enable": "Vorübergehend aktivieren…", + "unset": "Aufheben", + "use_default": "Standard verwenden", + "view_random": "Zeige Zufällige" }, - "application_paths": { - "heading": "Anwendungspfade" + "actions_name": "Aktionen", + "age": "Alter", + "aliases": "Aliase", + "all": "Alle", + "also_known_as": "Auch bekannt unter", + "appears_with": "Tritt auf mit", + "ascending": "Aufsteigend", + "average_resolution": "Durchschnittliche Auflösung", + "between_and": "und", + "birth_year": "Geburtsjahr", + "birthdate": "Geburtsdatum", + "bitrate": "Bitrate", + "blobs_storage_type": { + "database": "Datenbank", + "filesystem": "Dateisystem" }, - "categories": { - "about": "Über", - "changelog": "Änderungsprotokoll", - "interface": "Oberfläche", - "logs": "Protokoll", - "metadata_providers": "Metadaten-Anbieter", - "plugins": "Plugins", - "scraping": "Durchsuchen", - "security": "Sicherheit", - "services": "Dienste", - "system": "System", - "tasks": "Aufgaben", - "tools": "Werkzeuge" + "captions": "Untertitel", + "career_length": "Karrierelänge", + "chapters": "Kapitel", + "circumcised": "Beschnitten", + "circumcised_types": { + "CUT": "Beschnitten", + "UNCUT": "Unbeschnitten" }, - "dlna": { - "allow_temp_ip": "Erlaube {tempIP}", - "allowed_ip_addresses": "Erlaubte IP Adressen", - "allowed_ip_temporarily": "Temporär erlaubte IP", - "default_ip_whitelist": "Standard IP Whitelist", - "default_ip_whitelist_desc": "Standard IP Adressen, welche DLNA nutzen dürfen. Nutze {wildcard} um alle IP Adressen zu erlauben.", - "disabled_dlna_temporarily": "DLNA vorübergehend deaktiviert", - "disallowed_ip": "Unzulässige IP", - "enabled_by_default": "Standardmäßig aktiviert", - "enabled_dlna_temporarily": "DLNA vorübergehend aktiviert", - "network_interfaces": "Netzwerkoberflächen", - "network_interfaces_desc": "Netzwerkoberflächen auf denen DLNA sichtbar ist. 
Eine leere Liste führt dazu, dass DLNA auf allen Oberflächen ausgeführt wird. Benötigt Neustart des DLNA nach Änderungen.", - "recent_ip_addresses": "Letzte IP Adressen", - "server_display_name": "Server Anzeigename", - "server_display_name_desc": "Anzeigename des DLNA-Servers. Standardmäßig {server_name} bei leerem Feld.", - "successfully_cancelled_temporary_behaviour": "Erfolgreich temporäres Verhalten aufgehoben", - "until_restart": "bis Neustart", - "video_sort_order": "Standard-Videosortierreihenfolge", - "video_sort_order_desc": "Reihenfolge, in der Videos standardmäßig sortiert werden." + "component_tagger": { + "config": { + "active_instance": "Aktive stash-box Instanz:", + "blacklist_desc": "Auf der Blacklist befindliche Objekte sind von Anfragen ausgenommen. Objekte sind reguläre Ausdrücke und Groß-/Kleinschreibung wird nicht beachtet. Manchen Zeichen muss ein Fluchtsymbol (Backslash) vorangestellt werden: {chars_require_escape}", + "blacklist_label": "Schwarze Liste", + "query_mode_auto": "Automatisch", + "query_mode_auto_desc": "Nutzt Metadaten sofern verfügbar bzw. 
Dateinamen", + "query_mode_dir": "Verzeichnis", + "query_mode_dir_desc": "Nutzt nur den übergeordneten Ordner", + "query_mode_filename": "Dateiname", + "query_mode_filename_desc": "Nutzt nur den Dateinamen", + "query_mode_label": "Suchmodus", + "query_mode_metadata": "Metadaten", + "query_mode_metadata_desc": "Nutzt nur Metadaten", + "query_mode_path": "Pfad", + "query_mode_path_desc": "Nutzt vollständigen Dateipfad", + "set_cover_desc": "Überschreibe Titelbild sofern verfügbar.", + "set_cover_label": "Setze Cover-Bild", + "set_tag_desc": "Hänge Tags der Szene an, entweder durch Überschreiben oder Zusammenführen mit bereits angehängten Tags.", + "set_tag_label": "Tags anhängen", + "show_male_desc": "Auswahl ob männliche Darsteller der Szene hinzugefügt werden können.", + "show_male_label": "Männliche Darsteller anzeigen", + "source": "Quelle" + }, + "noun_query": "Anfrage", + "results": { + "duration_off": "Laufzeitunterschied bei mindestens {number}sek", + "duration_unknown": "Laufzeit unbekannt", + "fp_found": "{fpCount, plural, =0 {Keine neuen Fingerabdruckübereinstimmungen gefunden} other {# neue Fingerabdruckübereinstimmungen gefunden}}", + "fp_matches": "Übereinstimmung der Laufzeit", + "fp_matches_multi": "Laufzeit stimmt mit {matchCount}/{durationsLength} Fingerabdrücken überein", + "hash_matches": "Übereinstimmung bei {hash_type}", + "match_failed_already_tagged": "Szene bereits getagged", + "match_failed_no_result": "Keine Übereinstimmungen gefunden", + "match_success": "Szene erfolgreich getagged", + "phash_matches": "{count} PHashes übereinstimmung", + "unnamed": "Unbenannt" + }, + "verb_match_fp": "Fingerabdrücke zuordnen", + "verb_matched": "zugeordnet", + "verb_scrape_all": "Alles Scrapen", + "verb_submit_fp": "Übermittele {fpCount, plural, one{# Fingerabdruck} other{# Fingerabdrücke}}", + "verb_toggle_config": "{toggle} {configuration}", + "verb_toggle_unmatched": "{toggle} nicht zugeordnete Szenen" }, - "general": { - "auth": { - "api_key": 
"API-Schlüssel", - "api_key_desc": "API-Schlüssel für externe Systeme. Nur nötig, falls Benutzer/Password konfiguriert. Benutzername muss vor Erzeugung des API Schlüssels gespeichert worden sein.", - "authentication": "Authentifizierung", - "clear_api_key": "API-Schlüssel löschen", - "credentials": { - "description": "Anmeldedaten, um den Zugriff auf den Stash einzuschränken.", - "heading": "Anmeldedaten" + "config": { + "about": { + "build_hash": "Hash des Builds:", + "build_time": "Zeitpunkt des Builds:", + "check_for_new_version": "Suche nach Updates", + "latest_version": "Aktuellste Version", + "latest_version_build_hash": "Neuester Build Hash:", + "new_version_notice": "[NEU]", + "release_date": "Veröffentlichungsdatum:", + "stash_discord": "Komm in unseren {url} Kanal", + "stash_home": "Stash ist beheimatet auf {url}", + "stash_open_collective": "Unterstütze uns über {url}", + "stash_wiki": "Stash {url} Seite", + "version": "Version" + }, + "application_paths": { + "heading": "Anwendungspfade" + }, + "categories": { + "about": "Über", + "changelog": "Änderungsprotokoll", + "interface": "Oberfläche", + "logs": "Protokoll", + "metadata_providers": "Metadaten-Anbieter", + "plugins": "Plugins", + "scraping": "Durchsuchen", + "security": "Sicherheit", + "services": "Dienste", + "system": "System", + "tasks": "Aufgaben", + "tools": "Werkzeuge" + }, + "dlna": { + "allow_temp_ip": "Erlaube {tempIP}", + "allowed_ip_addresses": "Erlaubte IP Adressen", + "allowed_ip_temporarily": "Temporär erlaubte IP", + "default_ip_whitelist": "Standard IP Whitelist", + "default_ip_whitelist_desc": "Standard IP Adressen, welche DLNA nutzen dürfen. 
Nutze {wildcard} um alle IP Adressen zu erlauben.", + "disabled_dlna_temporarily": "DLNA vorübergehend deaktiviert", + "disallowed_ip": "Unzulässige IP", + "enabled_by_default": "Standardmäßig aktiviert", + "enabled_dlna_temporarily": "DLNA vorübergehend aktiviert", + "network_interfaces": "Netzwerkoberflächen", + "network_interfaces_desc": "Netzwerkoberflächen auf denen DLNA sichtbar ist. Eine leere Liste führt dazu, dass DLNA auf allen Oberflächen ausgeführt wird. Benötigt Neustart des DLNA nach Änderungen.", + "recent_ip_addresses": "Letzte IP Adressen", + "server_display_name": "Server Anzeigename", + "server_display_name_desc": "Anzeigename des DLNA-Servers. Standardmäßig {server_name} bei leerem Feld.", + "successfully_cancelled_temporary_behaviour": "Erfolgreich temporäres Verhalten aufgehoben", + "until_restart": "bis Neustart", + "video_sort_order": "Standard-Videosortierreihenfolge", + "video_sort_order_desc": "Reihenfolge, in der Videos standardmäßig sortiert werden." + }, + "general": { + "auth": { + "api_key": "API-Schlüssel", + "api_key_desc": "API-Schlüssel für externe Systeme. Nur nötig, falls Benutzer/Password konfiguriert. Benutzername muss vor Erzeugung des API Schlüssels gespeichert worden sein.", + "authentication": "Authentifizierung", + "clear_api_key": "API-Schlüssel löschen", + "credentials": { + "description": "Anmeldedaten, um den Zugriff auf den Stash einzuschränken.", + "heading": "Anmeldedaten" + }, + "generate_api_key": "API-Schlüssel erzeugen", + "log_file": "Protokolldatei", + "log_file_desc": "Pfad zur Protokolldatei. Feld leer lassen, um Protokollierung zu deaktivieren. Benötigt Neustart.", + "log_http": "Protokolliere HTTP Zugriffe", + "log_http_desc": "Protokolliert HTTP Zugriffe im Terminal. Benötigt Neustart.", + "log_to_terminal": "Protokolliere zu Terminal", + "log_to_terminal_desc": "Protokolliert zusätzlich zur Protokolldatei auch zum Terminal. Gilt automatisch, sofern Protokolldatei deaktiviert. 
Benötigt Neustart.", + "maximum_session_age": "Maximale Sitzungsdauer", + "maximum_session_age_desc": "Maximale Wartezeit bis eine Login-Sitzung ausläuft, in Sekunden.", + "password": "Passwort", + "password_desc": "Passwort für den Zugriff auf Stash. Feld leer lassen, um Benutzerauthentifizierung zu deaktivieren", + "stash-box_integration": "Stash-box Einbindung", + "username": "Benutzername", + "username_desc": "Benutzername für den Zugriff auf Stash. Feld leer lassen, um Benutzerauthentifizierung zu deaktivieren" + }, + "backup_directory_path": { + "description": "Verzeichnisspeicherort für SQLite-Datenbankdateisicherungen", + "heading": "Backup-Verzeichnispfad" + }, + "blobs_path": { + "description": "Der Ort auf dem Dateisystem an dem die Binärdaten gespeichert werden. Wird nur angewendet, wenn der Binärdaten Speichertyp auf Dateisystem eingestellt ist. ACHTUNG: Eine Änderung des Pfades erfordert das manuelle Verschieben von bereits existierenden Daten.", + "heading": "Binärdaten Dateisystem-Pfad" + }, + "blobs_storage": { + "description": "Der Ort an dem Binärdaten wie Szenencover, Darsteller-, Studio- und Tag-Bilder bespeichert werden. Nach einer Änderung müssen die bereits existierenden Daten mit der Blobs migrieren Aufgabe migriert werden. Siehe Aufgaben-Seite für Migrierungen.", + "heading": "Binärdaten Speichertyp" + }, + "cache_location": "Verzeichnis für den Cache. Notwendig falls Streaming mit HLS (wie auf Apple Geräten üblich) oder DASH erfolgt.", + "cache_path_head": "Cache Pfad", + "calculate_md5_and_ohash_desc": "Berechne MD5 Prüfsumme zusätzlich zu oshash. Aktivierung führt dazu, dass erstmalige Scans mehr Zeit benötigen. Dateibenennungshash muss auf oshash gesetzt sein, um Berechnung des MD5 zu unterbinden.", + "calculate_md5_and_ohash_label": "Berechne MD5 für Videodateien", + "check_for_insecure_certificates": "Überprüfe auf unsichere Zertifikate", + "check_for_insecure_certificates_desc": "Manche Seiten nutzen unsichere SSL Zertifikate. 
Wenn diese Option nicht ausgewählt ist, überspringt der Scraper die Überprüfung und erlaubt das Scrapen dieser Seiten. Entfernen Sie das Häkchen, falls Sie Zertifikatsfehler beim Scrapen erhalten.", + "chrome_cdp_path": "Chrome CDP Pfad", + "chrome_cdp_path_desc": "Dateipfad zur Chrome Executable oder einer externen Adresse (beginnend mit http:// oder https://, bspw. http://localhost:9222/json/version) die auf eine Chrome Instanz zeigt.", + "create_galleries_from_folders_desc": "Wenn ausgewählt, erzeuge standardmäßig Galerien aus Verzeichnissen, welche Bilder enthalten. Erstellen Sie eine Datei .forcegallery oder .nogallery in dem Ordner, um das Verhalten für diesen zu erzwingen/unterdrücken.", + "create_galleries_from_folders_label": "Erzeuge Galerien aus Verzeichnissen mit Bilder darin", + "database": "Datenbank", + "db_path_head": "Datenbank Pfad", + "directory_locations_to_your_content": "Verzeichnis zu Ihren Inhalten", + "excluded_image_gallery_patterns_desc": "Reguläre Ausdrücke für Dateinamen/Pfade von Bildern/Galerien, welche von Scans ausgeschlossen werden und beim Aufräumen der Datenbank berücksichtigt werden sollen", + "excluded_image_gallery_patterns_head": "Schema für ausgeschlossene Bilder/Galerien", + "excluded_video_patterns_desc": "Reguläre Ausdrücke für Dateinamen/Pfade von Videos, welche von Scans ausgeschlossen werden und beim Aufräumen der Datenbank berücksichtigt werden sollen", + "excluded_video_patterns_head": "Schema für ausgeschlossene Videos", + "ffmpeg": { + "hardware_acceleration": { + "desc": "Nutzt verfügbare Hardware zum Kodieren von Video für Live-Transkodierung.", + "heading": "FFmpeg Hardware-Kodierung" + }, + "live_transcode": { + "input_args": { + "desc": "Erweitert: Zusätzliche Parameter für die Live-Transkodierung mit ffmpeg, welche vor dem Eingabefeld übergeben werden können.", + "heading": "FFmpeg Live Transcode Eingangsparameter" + }, + "output_args": { + "desc": "Erweitert: Zusätzliche Parameter für die Live-Transkodierung 
mit ffmpeg, welche vor dem Ausgabefeld übergeben werden können.", + "heading": "FFmpeg Live-Transkodierung Ausgangsparameter" + } + }, + "transcode": { + "input_args": { + "desc": "Erweitert: Zusätzliche Parameter für die Video-Generierung mit ffmpeg, welche vor dem Eingabefeld übergeben werden können.", + "heading": "FFmpeg Transkodierung Eingangsparameter" + }, + "output_args": { + "desc": "Erweitert: Zusätzliche Parameter für die Videogenerierung mit ffmpeg, welche vor dem Ausgabefeld übergeben werden können.", + "heading": "FFmpeg Transkodierung Ausgangsparameter" + } + } + }, + "funscript_heatmap_draw_range": "Reichweite in generierte Heatmaps einbeziehen", + "funscript_heatmap_draw_range_desc": "Zeichnet den Bewegungsbereich auf der y-Achse der erzeugten Heatmap. Vorhandene Heatmaps müssen nach der Änderung neu generiert werden.", + "gallery_cover_regex_desc": "Regulärer Ausdruck, verwendet um ein Bild als Galerietitelbild zu identifiziert", + "gallery_cover_regex_label": "Schema für Galerietitelbilder", + "gallery_ext_desc": "Durch Kommas getrennte Liste von Dateiformaten, welche als Galeriecontainer gelesen werden sollen.", + "gallery_ext_head": "Galeriecontainer Dateiformate", + "generated_file_naming_hash_desc": "Verwende MD5 oder oshash für die Benennung der generierten Dateien. Um dies zu ändern, müssen für alle Szenen der entsprechende MD5/oshash berechnet werden. Nachdem dieser Wert geändert wurde, müssen vorhandene generierte Dateien migriert oder neu generiert werden. 
Siehe Aufgabenseite für die Migration.", + "generated_file_naming_hash_head": "Dateinamen-Hash für generierte Dateien", + "generated_files_location": "Verzeichnisspeicherort für die generierten Dateien (Markierungen, Vorschauen, Sprites usw.)", + "generated_path_head": "Pfad für generierte Dateien", + "hashing": "Hashwertberechnung", + "heatmap_generation": "Funscript Heatmap Erzeugung", + "image_ext_desc": "Durch Kommas getrennte Liste von Dateierweiterungen, die als Bilder identifiziert werden.", + "image_ext_head": "Bilderweiterungen", + "include_audio_desc": "Binde Audiostream bei der Erstellung der Videovorschau ein.", + "include_audio_head": "Audio einbeziehen", + "logging": "Protokollierung", + "maximum_streaming_transcode_size_desc": "Maximale Größe für transcodierte Streams", + "maximum_streaming_transcode_size_head": "Maximale Streaming-Transcode-Größe", + "maximum_transcode_size_desc": "Maximale Größe für generierte Transcodes", + "maximum_transcode_size_head": "Maximale Transcodierungsgröße", + "metadata_path": { + "description": "Verzeichnis das bei einem vollständigen Export oder Import genutzt wird", + "heading": "Metadatenpfad" + }, + "number_of_parallel_task_for_scan_generation_desc": "Für die automatische Erkennung auf 0 setzen. Warnung: Mehr Aufgaben auszuführen, als erforderlich ist, um eine CPU-Auslastung von 100 % zu erreichen, verringert die Leistung und verursacht möglicherweise andere Probleme.", + "number_of_parallel_task_for_scan_generation_head": "Anzahl paralleler Tasks für Scan/Generierung", + "parallel_scan_head": "Paralleler Scan/Generierung", + "preview_generation": "Vorschau-Generierung", + "python_path": { + "description": "Ort der Python-Programmdatei. Wird für Script-Scraper und Plugins verwendet. 
Wenn leer, wird python aus der Umgebung aufgelöst", + "heading": "Python Pfad" + }, + "scraper_user_agent": "Scraper-Benutzeragent", + "scraper_user_agent_desc": "User-Agent-String, der während Scrape-HTTP-Anfragen verwendet wird", + "scrapers_path": { + "description": "Verzeichnis für die Konfigurationsdateien des Scrapers", + "heading": "Scraper Pfad" + }, + "scraping": "Durchsuchen", + "sqlite_location": "Dateispeicherort für die SQLite-Datenbank (erfordert Neustart). ACHTUNG: Ein Speicherort auf einem anderen System als dem Server auf dem Stash läuft (z.B. Netzwerkspeicher) wird nicht unterstützt!", + "video_ext_desc": "Durch Kommas getrennte Liste von Dateierweiterungen, die als Videos identifiziert werden.", + "video_ext_head": "Videodateiformate", + "video_head": "Video" + }, + "library": { + "exclusions": "Ausnahmen", + "gallery_and_image_options": "Galerie- und Bildoptionen", + "media_content_extensions": "Erweiterungen für Medieninhalte" + }, + "logs": { + "log_level": "Protokolllevel" + }, + "plugins": { + "hooks": "Einbindungen", + "triggers_on": "Auslösen bei" + }, + "scraping": { + "entity_metadata": "{entityType} Metadaten", + "entity_scrapers": "{entityType} Scraper", + "excluded_tag_patterns_desc": "Reguläre Audrücke von Tags zum Ausschließen von Scraping Ergebnissen", + "excluded_tag_patterns_head": "Tag Muster ausschließen", + "scraper": "Scraper", + "scrapers": "Scraper", + "search_by_name": "Suche nach Name", + "supported_types": "Unterstützte Typen", + "supported_urls": "Unterstützte Adressen" + }, + "stashbox": { + "add_instance": "Stash-Box-Instanz hinzufügen", + "api_key": "API-Schlüssel", + "description": "Stash-Box erleichtert das automatisierte Tagging von Szenen und Darstellern basierend auf Fingerabdrücken und Dateinamen.\nEndpunkt und API-Schlüssel finden Sie auf Ihrer Kontoseite in der stash-box-Instanz. 
Ein Name ist erforderlich, wenn mehr als eine Instanz hinzugefügt wird.", + "endpoint": "Endpunkt", + "graphql_endpoint": "GraphQL-Endpunkt", + "name": "Name", + "title": "Stash-Box-Endpunkte" }, - "generate_api_key": "API-Schlüssel erzeugen", - "log_file": "Protokolldatei", - "log_file_desc": "Pfad zur Protokolldatei. Feld leer lassen, um Protokollierung zu deaktivieren. Benötigt Neustart.", - "log_http": "Protokolliere HTTP Zugriffe", - "log_http_desc": "Protokolliert HTTP Zugriffe im Terminal. Benötigt Neustart.", - "log_to_terminal": "Protokolliere zu Terminal", - "log_to_terminal_desc": "Protokolliert zusätzlich zur Protokolldatei auch zum Terminal. Gilt automatisch, sofern Protokolldatei deaktiviert. Benötigt Neustart.", - "maximum_session_age": "Maximale Sitzungsdauer", - "maximum_session_age_desc": "Maximale Wartezeit bis eine Login-Sitzung ausläuft, in Sekunden.", - "password": "Passwort", - "password_desc": "Passwort für den Zugriff auf Stash. Feld leer lassen, um Benutzerauthentifizierung zu deaktivieren", - "stash-box_integration": "Stash-box Einbindung", - "username": "Benutzername", - "username_desc": "Benutzername für den Zugriff auf Stash. Feld leer lassen, um Benutzerauthentifizierung zu deaktivieren" - }, - "backup_directory_path": { - "description": "Verzeichnisspeicherort für SQLite-Datenbankdateisicherungen", - "heading": "Backup-Verzeichnispfad" - }, - "blobs_path": { - "description": "Der Ort auf dem Dateisystem an dem die Binärdaten gespeichert werden. Wird nur angewendet, wenn der Binärdaten Speichertyp auf Dateisystem eingestellt ist. ACHTUNG: Eine Änderung des Pfades erfordert das manuelle Verschieben von bereits existierenden Daten.", - "heading": "Binärdaten Dateisystem-Pfad" - }, - "blobs_storage": { - "description": "Der Ort an dem Binärdaten wie Szenencover, Darsteller-, Studio- und Tag-Bilder bespeichert werden. Nach einer Änderung müssen die bereits existierenden Daten mit der Blobs migrieren Aufgabe migriert werden. 
Siehe Aufgaben-Seite für Migrierungen.", - "heading": "Binärdaten Speichertyp" - }, - "cache_location": "Verzeichnis für den Cache. Notwendig falls Streaming mit HLS (wie auf Apple Geräten üblich) oder DASH erfolgt.", - "cache_path_head": "Cache Pfad", - "calculate_md5_and_ohash_desc": "Berechne MD5 Prüfsumme zusätzlich zu oshash. Aktivierung führt dazu, dass erstmalige Scans mehr Zeit benötigen. Dateibenennungshash muss auf oshash gesetzt sein, um Berechnung des MD5 zu unterbinden.", - "calculate_md5_and_ohash_label": "Berechne MD5 für Videodateien", - "check_for_insecure_certificates": "Überprüfe auf unsichere Zertifikate", - "check_for_insecure_certificates_desc": "Manche Seiten nutzen unsichere SSL Zertifikate. Wenn diese Option nicht ausgewählt ist, überspringt der Scraper die Überprüfung und erlaubt das Scrapen dieser Seiten. Entfernen Sie das Häkchen, falls Sie Zertifikatsfehler beim Scrapen erhalten.", - "chrome_cdp_path": "Chrome CDP Pfad", - "chrome_cdp_path_desc": "Dateipfad zur Chrome Executable oder einer externen Adresse (beginnend mit http:// oder https://, bspw. http://localhost:9222/json/version) die auf eine Chrome Instanz zeigt.", - "create_galleries_from_folders_desc": "Wenn ausgewählt, erzeuge standardmäßig Galerien aus Verzeichnissen, welche Bilder enthalten. 
Erstellen Sie eine Datei .forcegallery oder .nogallery in dem Ordner, um das Verhalten für diesen zu erzwingen/unterdrücken.", - "create_galleries_from_folders_label": "Erzeuge Galerien aus Verzeichnissen mit Bilder darin", - "database": "Datenbank", - "db_path_head": "Datenbank Pfad", - "directory_locations_to_your_content": "Verzeichnis zu Ihren Inhalten", - "excluded_image_gallery_patterns_desc": "Reguläre Ausdrücke für Dateinamen/Pfade von Bildern/Galerien, welche von Scans ausgeschlossen werden und beim Aufräumen der Datenbank berücksichtigt werden sollen", - "excluded_image_gallery_patterns_head": "Schema für ausgeschlossene Bilder/Galerien", - "excluded_video_patterns_desc": "Reguläre Ausdrücke für Dateinamen/Pfade von Videos, welche von Scans ausgeschlossen werden und beim Aufräumen der Datenbank berücksichtigt werden sollen", - "excluded_video_patterns_head": "Schema für ausgeschlossene Videos", - "ffmpeg": { - "hardware_acceleration": { - "desc": "Nutzt verfügbare Hardware zum Kodieren von Video für Live-Transkodierung.", - "heading": "FFmpeg Hardware-Kodierung" + "system": { + "transcoding": "Transcodierung" }, - "live_transcode": { - "input_args": { - "desc": "Erweitert: Zusätzliche Parameter für die Live-Transkodierung mit ffmpeg, welche vor dem Eingabefeld übergeben werden können.", - "heading": "FFmpeg Live Transcode Eingangsparameter" - }, - "output_args": { - "desc": "Erweitert: Zusätzliche Parameter für die Live-Transkodierung mit ffmpeg, welche vor dem Ausgabefeld übergeben werden können.", - "heading": "FFmpeg Live-Transkodierung Ausgangsparameter" - } + "tasks": { + "added_job_to_queue": "{operation_name} zur Auftragswarteschlange hinzugefügt", + "anonymise_and_download": "Erstellt eine anonymisierte Kopie der Datenbank und lädt diese im Anschluss herunter.", + "anonymise_database": "Erstellt eine Kopie der Datenbank in das Backup-Verzeichnis und anonymisiert alle empfindlichen Daten. 
Diese kann dann für zur Fehlersuche und -behebung geteilt werden. Die ursprüngliche Datenbank wird dabei nicht verändert. Die anonymisierte Datenbank verwendet das Dateiformat {filename_format}.", + "anonymising_database": "Anonymisiere Datenbank", + "auto_tag": { + "auto_tagging_all_paths": "Automatisches Taggen aller Pfade", + "auto_tagging_paths": "Automatisches Taggen der folgenden Pfade" + }, + "auto_tag_based_on_filenames": "Inhalte basierend auf Dateipfaden automatisch taggen.", + "auto_tagging": "Automatisches Tagging", + "backing_up_database": "Datenbank sichern", + "backup_and_download": "Führt eine Sicherung der Datenbank durch und lädt die resultierende Datei herunter.", + "backup_database": "Führt eine Sicherung der Datenbank in den Backup-Verzeichnispfad mit dem Dateiformat {filename_format} aus", + "cleanup_desc": "Suche nach fehlenden Dateien und entfernen Sie diese aus der Datenbank. Dies ist eine destruktive Aktion.", + "data_management": "Datenmanagement", + "defaults_set": "Standardeinstellungen wurden eingestellt und werden genutzt, wenn {action} Button auf der Aufgabenseite geklickt wurde.", + "dont_include_file_extension_as_part_of_the_title": "Füge keine Dateierweiterung als Teil des Titels hinzu", + "empty_queue": "Derzeit laufen keine Aufgaben.", + "export_to_json": "Exportiert den Datenbankinhalt in JSON-Format im Metadatenverzeichnis.", + "generate": { + "generating_from_paths": "Generieren der Szenen aus den folgenden Pfaden", + "generating_scenes": "Generieren für {num} {scene}" + }, + "generate_clip_previews_during_scan": "Erstelle Previews für Bild-Clips", + "generate_desc": "Generiere unterstützende Bild-, Sprite-, Video-, VTT- und andere Dateien.", + "generate_phashes_during_scan": "Generiere Wahrnehmungshashwerte", + "generate_phashes_during_scan_tooltip": "Zur Deduplizierung und Szenenerkennung.", + "generate_previews_during_scan": "Animierte Bildvorschauen erstellen", + "generate_previews_during_scan_tooltip": "Generiert 
animierte WebP-Vorschaubilder, nur erforderlich, wenn der Vorschautyp auf Animiertes Bild eingestellt ist.", + "generate_sprites_during_scan": "Scrubber-Sprites generieren", + "generate_thumbnails_during_scan": "Generiert Miniaturansichten für Bilder", + "generate_video_covers_during_scan": "Erzeuge Szenen-Cover", + "generate_video_previews_during_scan": "Vorschaubilder generieren", + "generate_video_previews_during_scan_tooltip": "Generiert Videovorschauen, die abgespielt werden, wenn man den Mauszeiger über eine Szene bewegt", + "generated_content": "Generierter Inhalt", + "identify": { + "and_create_missing": "und erstelle fehlende", + "create_missing": "Erstelle Fehlende", + "default_options": "Standardoptionen", + "description": "Automatisches erstellen der Szenen Metadaten durch Stash-Box und Scraper Quellen.", + "explicit_set_description": "Die folgenden Optionen werden genutzt, wenn sie nicht in der quellenspezifischen Konfiguration überschrieben worden sind.", + "field": "Feld", + "field_behaviour": "{strategy} {field}", + "field_options": "Feldoptionen", + "heading": "Identifizieren", + "identifying_from_paths": "Identifizieren von Szenen durch den folgenden Pfad", + "identifying_scenes": "Identifiziere {num} {scene}", + "include_male_performers": "Männliche Darsteller einbeziehen", + "set_cover_images": "Titelbild festlegen", + "set_organized": "Setze 'Organisiert'", + "source": "Quelle", + "source_options": "{source} Optionen", + "sources": "Quellen", + "strategy": "Strategie" + }, + "import_from_exported_json": "Import aus exportiertem JSON im Metadatenverzeichnis. Löscht die vorhandene Datenbank.", + "incremental_import": "Inkrementeller Import aus einer Export-ZIP-Datei.", + "job_queue": "Aufgabenwarteschlange", + "maintenance": "Instandhaltung", + "migrate_blobs": { + "delete_old": "Ältere Daten löschen", + "description": "Migriere Blobs auf den aktuellen Binärdaten Speichertyp. 
Diese Migration sollte durchgeführt werden nachdem der Binärdaten Speichertyp geändert wurde. Optional können die alten Daten nach der Migration gelöscht werden." + }, + "migrate_hash_files": "Wird nach dem Ändern des Dateinamen-Hashs für generierte Dateien verwendet, um vorhandene generierte Dateien in das neue Hash-Format umzubenennen.", + "migrate_scene_screenshots": { + "delete_files": "Screenshot-Dateien löschen", + "description": "Migriere Szenen-Screenshots in den neuen Binärdaten Speichertyp. Diese Migration sollte durchgeführt werden nachdem ein System auf die Version 0.20 geupdatet wurde. Optional können ältere Screenshot-Dateien gelöscht werden.", + "overwrite_existing": "Überschreibe existierende Binärblobs mit Screenshot-Dateien" + }, + "migrations": "Migrationen", + "only_dry_run": "Führt einen Probelauf durch. Es wird noch nichts entfernt", + "plugin_tasks": "Plugin-Aufgaben", + "scan": { + "scanning_all_paths": "Scannen aller Pfade", + "scanning_paths": "Scannen der folgenden Pfade" + }, + "scan_for_content_desc": "Suchen nach neuen Inhalten und füge sie der Datenbank hinzu.", + "set_name_date_details_from_metadata_if_present": "Name, Datum und Details aus eingebetteten Metadaten festlegen" }, - "transcode": { - "input_args": { - "desc": "Erweitert: Zusätzliche Parameter für die Video-Generierung mit ffmpeg, welche vor dem Eingabefeld übergeben werden können.", - "heading": "FFmpeg Transkodierung Eingangsparameter" - }, - "output_args": { - "desc": "Erweitert: Zusätzliche Parameter für die Videogenerierung mit ffmpeg, welche vor dem Ausgabefeld übergeben werden können.", - "heading": "FFmpeg Transkodierung Ausgangsparameter" - } + "tools": { + "scene_duplicate_checker": "Duplikatsprüfung für Szenen", + "scene_filename_parser": { + "add_field": "Feld hinzufügen", + "capitalize_title": "Titel groß schreiben", + "display_fields": "Anzeigefelder", + "escape_chars": "Verwenden Sie \\, um Literale zu maskieren", + "filename": "Dateiname", + 
"filename_pattern": "Dateinamenmuster", + "ignore_organized": "Ignoriere organizierte Szenen", + "ignored_words": "Ignorierte Wörter", + "matches_with": "Stimmt mit {i} überein", + "select_parser_recipe": "Parser-Rezept auswählen", + "title": "Szenendateinamen-Parser", + "whitespace_chars": "Zwischenraumzeichen", + "whitespace_chars_desc": "Diese Zeichen werden im Titel durch Zwischenraumzeichen ersetzt" + }, + "scene_tools": "Szenen-Tools" + }, + "ui": { + "abbreviate_counters": { + "description": "Verkürze Zähler in Karten und den Detail-Ansichten ab, zum Beispiel wird \"1831\" als \"1.8K\" abgekürzt.", + "heading": "Zähler verkürzen" + }, + "basic_settings": "Grundeinstellungen", + "custom_css": { + "description": "Die Seite muss neu geladen werden, damit die Änderungen wirksam werden. Es gibt keine Garantie für die Kompatibilität des benutzerdefinierten CSS und zukünftigen Versionen von Stash.", + "heading": "Benutzerdefinierte CSS", + "option_label": "Benutzerdefiniertes CSS aktiviert" + }, + "custom_javascript": { + "description": "Seite muss neu geladen werden, damit die Änderungen wirksam werden. Es gibt keine Garantie für die Kompatibilität des benutzerdefinierten Javascript und zukünftigen Versionen von Stash.", + "heading": "Benutzerdefiniertes Javascript", + "option_label": "Benutzerdefiniertes Javascript aktiviert" + }, + "custom_locales": { + "description": "Überschreibe einzelne Locale-Strings. Siehe https://github.com/stashapp/stash/blob/develop/ui/v2.5/src/locales/en-GB.json für die Hauptliste. 
Die Seite muss neu geladen werden, damit die Änderungen wirksam werden.", + "heading": "Benutzerdefinierte Lokalisierung", + "option_label": "Benutzerdefinierte Lokalisierung aktiviert" + }, + "delete_options": { + "description": "Standardeinstellungen wenn Bilder, Galerien und Szenen gelöscht werden.", + "heading": "Optionen löschen", + "options": { + "delete_file": "Lösche standardmäßig die Dateien", + "delete_generated_supporting_files": "Lösche standardmäßig die generierten Hilfsdateien" + } + }, + "desktop_integration": { + "desktop_integration": "Schreibtisch Integration", + "notifications_enabled": "Benachrichtigungen aktivieren", + "send_desktop_notifications_for_events": "Bei Neuigkeiten Benachrichtigungen auf den Desktop senden", + "skip_opening_browser": "Überspringe Öffnen des Browsers", + "skip_opening_browser_on_startup": "Überspringe automatisches Öffnen des Browsers bei Start" + }, + "editing": { + "disable_dropdown_create": { + "description": "Entferne die Möglichkeit der Erstellung neuer Objekte in der Dropdown-Auswahl", + "heading": "Entferne Dropdown Erstellung" + }, + "heading": "Editieren", + "max_options_shown": { + "label": "Maximalanzahl der anzuzeigenden Objekte in Dropdown-Menüs" + }, + "rating_system": { + "star_precision": { + "label": "Präzision der Sternebewertung", + "options": { + "full": "Voll", + "half": "Halb", + "quarter": "Viertel", + "tenth": "Zehntel" + } + }, + "type": { + "label": "Art des Bewertungssystems", + "options": { + "decimal": "Dezimal", + "stars": "Sterne" + } + } + } + }, + "funscript_offset": { + "description": "Zeitversatz in Millisekunden für interaktive Skriptwiedergabe.", + "heading": "Funscript Zeitversatz (ms)" + }, + "handy_connection": { + "connect": "Verbinden", + "server_offset": { + "heading": "Server Kompensation" + }, + "status": { + "heading": "Handy Verbindungsstatus" + }, + "sync": "Synchronisieren" + }, + "handy_connection_key": { + "description": "Handy Verbindungsschlüssel für interaktive 
Szenen. Wenn dieser Schlüssel gesetzt wird, kann Stash aktuelle Szeneninformationen mit handyfeeling.com teilen", + "heading": "Handy Verbindungsschlüssel" + }, + "image_lightbox": { + "heading": "Bild-Lightbox" + }, + "image_wall": { + "direction": "Richtung", + "heading": "Bilderwand", + "margin": "Marge (Pixel)" + }, + "images": { + "heading": "Bilder", + "options": { + "create_image_clips_from_videos": { + "description": "Wenn für die Bibliothek Videos deaktiviert wurden, können Videodateien (Dateien mit Videodateiendungen) als Bild-Clips eingescannt werden.", + "heading": "Scannen von Videoerweiterungen als Bild-Clip" + }, + "write_image_thumbnails": { + "description": "Speichere eilig erstellte Bild-Thumbnails ab", + "heading": "Speichere Bild-Thumbnails" + } + } + }, + "interactive_options": "Interaktive Optionen", + "language": { + "heading": "Sprache" + }, + "max_loop_duration": { + "description": "Maximale Szenendauer, bei der das Video wiederholt wird – 0 zum Deaktivieren", + "heading": "Maximale Schleifendauer" + }, + "menu_items": { + "description": "Anzeigen oder Ausblenden verschiedener Inhaltstypen in der Navigationsleiste", + "heading": "Menüpunkte" + }, + "minimum_play_percent": { + "description": "Der prozentuale Anteil der Zeit, in der eine Szene gespielt werden muss, bevor der Abspielzähler erhöht wird.", + "heading": "Mindestabspieldauer (Prozent)" + }, + "performers": { + "options": { + "image_location": { + "description": "Benutzerdefinierter Pfad zu den standardmäßigen Darstellerbildern. 
Leer lassen, um mitgelieferte Darstellerbilder zu verwenden", + "heading": "Benutzerdefinierter Pfad zu Darstellerbildern" + } + } + }, + "preview_type": { + "description": "Konfiguration für Szenenwand", + "heading": "Vorschautyp", + "options": { + "animated": "Animiertes Bild", + "static": "Statisches Bild", + "video": "Video" + } + }, + "scene_list": { + "heading": "Szenenliste", + "options": { + "show_studio_as_text": "Studios als Text anzeigen" + } + }, + "scene_player": { + "heading": "Szenenplayer", + "options": { + "always_start_from_beginning": "Video immer von Anfang an starten", + "auto_start_video": "Video automatisch starten", + "auto_start_video_on_play_selected": { + "description": "Automatischer Start von Videos aus der Warteschlange oder bei einer Wiedergabe von ausgewählten oder zufälligen Videos von der Szenen-Seite", + "heading": "Automatische Wiedergabe von ausgewählten Videos" + }, + "continue_playlist_default": { + "description": "Nächste Szene in der Warteschlange spielen", + "heading": "Standardmäßig die Wiedergabeliste fortsetzen" + }, + "show_scrubber": "Scrubber anzeigen", + "track_activity": "Aktivität verfolgen", + "vr_tag": { + "description": "Der VR-Knopf wird nur für Szenen mit diesem Tag angezeigt.", + "heading": "VR Tag" + } + } + }, + "scene_wall": { + "heading": "Szenen-/Markierungswand", + "options": { + "display_title": "Titel und Tags anzeigen", + "toggle_sound": "Sound einschalten" + } + }, + "scroll_attempts_before_change": { + "description": "Anzahl der Versuche, einen Bildlauf durchzuführen, bevor zum nächsten/vorherigen Element gewechselt wird. 
Gilt nur für den Bildlaufmodus Schwenkung Y.", + "heading": "Anzahl Scroll-Versuche vor Übergang" + }, + "show_tag_card_on_hover": { + "description": "Tag-Karte anzeigen, wenn der Mauszeiger über Tag-Abzeichen bewegt wird", + "heading": "Tag-Karten-Tooltips" + }, + "slideshow_delay": { + "description": "Die Diashow ist in Galerien in der Wandansicht verfügbar", + "heading": "Verzögerung der Diashow (Sekunden)" + }, + "studio_panel": { + "heading": "Studioansicht", + "options": { + "show_child_studio_content": { + "description": "In der Studioansicht, zeige auch Inhalte von Unterstudios", + "heading": "Zeige Inhalte von Unterstudios" + } + } + }, + "tag_panel": { + "heading": "Tag-Ansicht", + "options": { + "show_child_tagged_content": { + "description": "In der Tag-Ansicht, zeige auch Inhalte der Sub-Tags", + "heading": "Zeige Sub-Tag Inhalte" + } + } + }, + "title": "Benutzeroberfläche" } - }, - "funscript_heatmap_draw_range": "Reichweite in generierte Heatmaps einbeziehen", - "funscript_heatmap_draw_range_desc": "Zeichnet den Bewegungsbereich auf der y-Achse der erzeugten Heatmap. Vorhandene Heatmaps müssen nach der Änderung neu generiert werden.", - "gallery_cover_regex_desc": "Regulärer Ausdruck, verwendet um ein Bild als Galerietitelbild zu identifiziert", - "gallery_cover_regex_label": "Schema für Galerietitelbilder", - "gallery_ext_desc": "Durch Kommas getrennte Liste von Dateiformaten, welche als Galeriecontainer gelesen werden sollen.", - "gallery_ext_head": "Galeriecontainer Dateiformate", - "generated_file_naming_hash_desc": "Verwende MD5 oder oshash für die Benennung der generierten Dateien. Um dies zu ändern, müssen für alle Szenen der entsprechende MD5/oshash berechnet werden. Nachdem dieser Wert geändert wurde, müssen vorhandene generierte Dateien migriert oder neu generiert werden. 
Siehe Aufgabenseite für die Migration.", - "generated_file_naming_hash_head": "Dateinamen-Hash für generierte Dateien", - "generated_files_location": "Verzeichnisspeicherort für die generierten Dateien (Markierungen, Vorschauen, Sprites usw.)", - "generated_path_head": "Pfad für generierte Dateien", - "hashing": "Hashwertberechnung", - "heatmap_generation": "Funscript Heatmap Erzeugung", - "image_ext_desc": "Durch Kommas getrennte Liste von Dateierweiterungen, die als Bilder identifiziert werden.", - "image_ext_head": "Bilderweiterungen", - "include_audio_desc": "Binde Audiostream bei der Erstellung der Videovorschau ein.", - "include_audio_head": "Audio einbeziehen", - "logging": "Protokollierung", - "maximum_streaming_transcode_size_desc": "Maximale Größe für transcodierte Streams", - "maximum_streaming_transcode_size_head": "Maximale Streaming-Transcode-Größe", - "maximum_transcode_size_desc": "Maximale Größe für generierte Transcodes", - "maximum_transcode_size_head": "Maximale Transcodierungsgröße", - "metadata_path": { - "description": "Verzeichnis das bei einem vollständigen Export oder Import genutzt wird", - "heading": "Metadatenpfad" - }, - "number_of_parallel_task_for_scan_generation_desc": "Für die automatische Erkennung auf 0 setzen. Warnung: Mehr Aufgaben auszuführen, als erforderlich ist, um eine CPU-Auslastung von 100 % zu erreichen, verringert die Leistung und verursacht möglicherweise andere Probleme.", - "number_of_parallel_task_for_scan_generation_head": "Anzahl paralleler Tasks für Scan/Generierung", - "parallel_scan_head": "Paralleler Scan/Generierung", - "preview_generation": "Vorschau-Generierung", - "python_path": { - "description": "Ort der Python-Programmdatei. Wird für Script-Scraper und Plugins verwendet. 
Wenn leer, wird python aus der Umgebung aufgelöst", - "heading": "Python Pfad" - }, - "scraper_user_agent": "Scraper-Benutzeragent", - "scraper_user_agent_desc": "User-Agent-String, der während Scrape-HTTP-Anfragen verwendet wird", - "scrapers_path": { - "description": "Verzeichnis für die Konfigurationsdateien des Scrapers", - "heading": "Scraper Pfad" - }, - "scraping": "Durchsuchen", - "sqlite_location": "Dateispeicherort für die SQLite-Datenbank (erfordert Neustart). ACHTUNG: Ein Speicherort auf einem anderen System als dem Server auf dem Stash läuft (z.B. Netzwerkspeicher) wird nicht unterstützt!", - "video_ext_desc": "Durch Kommas getrennte Liste von Dateierweiterungen, die als Videos identifiziert werden.", - "video_ext_head": "Videodateiformate", - "video_head": "Video" - }, - "library": { - "exclusions": "Ausnahmen", - "gallery_and_image_options": "Galerie- und Bildoptionen", - "media_content_extensions": "Erweiterungen für Medieninhalte" - }, - "logs": { - "log_level": "Protokolllevel" }, - "plugins": { - "hooks": "Einbindungen", - "triggers_on": "Auslösen bei" + "configuration": "Konfiguration", + "countables": { + "files": "{count, plural, one {Datei} other {Dateien}}", + "galleries": "{count, plural, one {Galerie} other {Galerien}}", + "images": "{count, plural, one {Bild} other {Bilder}}", + "markers": "{count, plural, one {Markierung} other {Markierungen}}", + "movies": "{count, plural, one {Film} other {Filme}}", + "performers": "{count, plural, one {Darsteller} other {Darsteller}}", + "scenes": "{count, plural, one {Szene} other {Szenen}}", + "studios": "{count, plural, one {Studio} other {Studios}}", + "tags": "{count, plural, one {Tag} other {Tags}}" }, - "scraping": { - "entity_metadata": "{entityType} Metadaten", - "entity_scrapers": "{entityType} Scraper", - "excluded_tag_patterns_desc": "Reguläre Audrücke von Tags zum Ausschließen von Scraping Ergebnissen", - "excluded_tag_patterns_head": "Tag Muster ausschließen", - "scraper": "Scraper", - 
"scrapers": "Scraper", - "search_by_name": "Suche nach Name", - "supported_types": "Unterstützte Typen", - "supported_urls": "Unterstützte Adressen" - }, - "stashbox": { - "add_instance": "Stash-Box-Instanz hinzufügen", - "api_key": "API-Schlüssel", - "description": "Stash-Box erleichtert das automatisierte Tagging von Szenen und Darstellern basierend auf Fingerabdrücken und Dateinamen.\nEndpunkt und API-Schlüssel finden Sie auf Ihrer Kontoseite in der stash-box-Instanz. Ein Name ist erforderlich, wenn mehr als eine Instanz hinzugefügt wird.", - "endpoint": "Endpunkt", - "graphql_endpoint": "GraphQL-Endpunkt", - "name": "Name", - "title": "Stash-Box-Endpunkte" + "country": "Land", + "cover_image": "Titelbild", + "created_at": "Erstellt am", + "criterion": { + "greater_than": "Größer als", + "less_than": "Weniger als", + "value": "Wert" }, - "system": { - "transcoding": "Transcodierung" + "criterion_modifier": { + "between": "zwischen", + "equals": "ist", + "excludes": "schließt aus", + "format_string": "{criterion} {modifierString} {valueString}", + "format_string_depth": "{criterion} {modifierString} {valueString} (+{depth, plural, =-1 {Alle} other {{Tiefe}}})", + "format_string_excludes": "{criterion} {modifierString} {valueString} (ausgenommen {excludedString})", + "format_string_excludes_depth": "{criterion} {modifierString} {valueString} (ausgenommen {excludedString}) (+{depth, plural, =-1 {Alle} other {{Tiefe}}})", + "greater_than": "ist größer als", + "includes": "beinhaltet", + "includes_all": "beinhaltet alles", + "is_null": "ist nichts", + "less_than": "ist weniger als", + "matches_regex": "stimmt mit Regex überein", + "not_between": "nicht zwischen", + "not_equals": "ist nicht", + "not_matches_regex": "stimmt nicht mit Regex überein", + "not_null": "ist nicht nichts" }, - "tasks": { - "added_job_to_queue": "{operation_name} zur Auftragswarteschlange hinzugefügt", - "anonymise_and_download": "Erstellt eine anonymisierte Kopie der Datenbank und lädt diese 
im Anschluss herunter.", - "anonymise_database": "Erstellt eine Kopie der Datenbank in das Backup-Verzeichnis und anonymisiert alle empfindlichen Daten. Diese kann dann für zur Fehlersuche und -behebung geteilt werden. Die ursprüngliche Datenbank wird dabei nicht verändert. Die anonymisierte Datenbank verwendet das Dateiformat {filename_format}.", - "anonymising_database": "Anonymisiere Datenbank", - "auto_tag": { - "auto_tagging_all_paths": "Automatisches Taggen aller Pfade", - "auto_tagging_paths": "Automatisches Taggen der folgenden Pfade" - }, - "auto_tag_based_on_filenames": "Inhalte basierend auf Dateipfaden automatisch taggen.", - "auto_tagging": "Automatisches Tagging", - "backing_up_database": "Datenbank sichern", - "backup_and_download": "Führt eine Sicherung der Datenbank durch und lädt die resultierende Datei herunter.", - "backup_database": "Führt eine Sicherung der Datenbank in den Backup-Verzeichnispfad mit dem Dateiformat {filename_format} aus", - "cleanup_desc": "Suche nach fehlenden Dateien und entfernen Sie diese aus der Datenbank. 
Dies ist eine destruktive Aktion.", - "data_management": "Datenmanagement", - "defaults_set": "Standardeinstellungen wurden eingestellt und werden genutzt, wenn {action} Button auf der Aufgabenseite geklickt wurde.", - "dont_include_file_extension_as_part_of_the_title": "Füge keine Dateierweiterung als Teil des Titels hinzu", - "empty_queue": "Derzeit laufen keine Aufgaben.", - "export_to_json": "Exportiert den Datenbankinhalt in JSON-Format im Metadatenverzeichnis.", - "generate": { - "generating_from_paths": "Generieren der Szenen aus den folgenden Pfaden", - "generating_scenes": "Generieren für {num} {scene}" - }, - "generate_clip_previews_during_scan": "Erstelle Previews für Bild-Clips", - "generate_desc": "Generiere unterstützende Bild-, Sprite-, Video-, VTT- und andere Dateien.", - "generate_phashes_during_scan": "Generiere Wahrnehmungshashwerte", - "generate_phashes_during_scan_tooltip": "Zur Deduplizierung und Szenenerkennung.", - "generate_previews_during_scan": "Animierte Bildvorschauen erstellen", - "generate_previews_during_scan_tooltip": "Generiert animierte WebP-Vorschaubilder, nur erforderlich, wenn der Vorschautyp auf Animiertes Bild eingestellt ist.", - "generate_sprites_during_scan": "Scrubber-Sprites generieren", - "generate_thumbnails_during_scan": "Generiert Miniaturansichten für Bilder", - "generate_video_covers_during_scan": "Erzeuge Szenen-Cover", - "generate_video_previews_during_scan": "Vorschaubilder generieren", - "generate_video_previews_during_scan_tooltip": "Generiert Videovorschauen, die abgespielt werden, wenn man den Mauszeiger über eine Szene bewegt", - "generated_content": "Generierter Inhalt", - "identify": { - "and_create_missing": "und erstelle fehlende", - "create_missing": "Erstelle Fehlende", - "default_options": "Standardoptionen", - "description": "Automatisches erstellen der Szenen Metadaten durch Stash-Box und Scraper Quellen.", - "explicit_set_description": "Die folgenden Optionen werden genutzt, wenn sie nicht in der 
quellenspezifischen Konfiguration überschrieben worden sind.", - "field": "Feld", - "field_behaviour": "{strategy} {field}", - "field_options": "Feldoptionen", - "heading": "Identifizieren", - "identifying_from_paths": "Identifizieren von Szenen durch den folgenden Pfad", - "identifying_scenes": "Identifiziere {num} {scene}", - "include_male_performers": "Männliche Darsteller einbeziehen", - "set_cover_images": "Titelbild festlegen", - "set_organized": "Setze 'Organisiert'", - "source": "Quelle", - "source_options": "{source} Optionen", - "sources": "Quellen", - "strategy": "Strategie" - }, - "import_from_exported_json": "Import aus exportiertem JSON im Metadatenverzeichnis. Löscht die vorhandene Datenbank.", - "incremental_import": "Inkrementeller Import aus einer Export-ZIP-Datei.", - "job_queue": "Aufgabenwarteschlange", - "maintenance": "Instandhaltung", - "migrate_blobs": { - "delete_old": "Ältere Daten löschen", - "description": "Migriere Blobs auf den aktuellen Binärdaten Speichertyp. Diese Migration sollte durchgeführt werden nachdem der Binärdaten Speichertyp geändert wurde. Optional können die alten Daten nach der Migration gelöscht werden." - }, - "migrate_hash_files": "Wird nach dem Ändern des Dateinamen-Hashs für generierte Dateien verwendet, um vorhandene generierte Dateien in das neue Hash-Format umzubenennen.", - "migrate_scene_screenshots": { - "delete_files": "Screenshot-Dateien löschen", - "description": "Migriere Szenen-Screenshots in den neuen Binärdaten Speichertyp. Diese Migration sollte durchgeführt werden nachdem ein System auf die Version 0.20 geupdatet wurde. Optional können ältere Screenshot-Dateien gelöscht werden.", - "overwrite_existing": "Überschreibe existierende Binärblobs mit Screenshot-Dateien" - }, - "migrations": "Migrationen", - "only_dry_run": "Führt einen Probelauf durch. 
Es wird noch nichts entfernt", - "plugin_tasks": "Plugin-Aufgaben", - "scan": { - "scanning_all_paths": "Scannen aller Pfade", - "scanning_paths": "Scannen der folgenden Pfade" - }, - "scan_for_content_desc": "Suchen nach neuen Inhalten und füge sie der Datenbank hinzu.", - "set_name_date_details_from_metadata_if_present": "Name, Datum und Details aus eingebetteten Metadaten festlegen" - }, - "tools": { - "scene_duplicate_checker": "Duplikatsprüfung für Szenen", - "scene_filename_parser": { - "add_field": "Feld hinzufügen", - "capitalize_title": "Titel groß schreiben", - "display_fields": "Anzeigefelder", - "escape_chars": "Verwenden Sie \\, um Literale zu maskieren", - "filename": "Dateiname", - "filename_pattern": "Dateinamenmuster", - "ignore_organized": "Ignoriere organizierte Szenen", - "ignored_words": "Ignorierte Wörter", - "matches_with": "Stimmt mit {i} überein", - "select_parser_recipe": "Parser-Rezept auswählen", - "title": "Szenendateinamen-Parser", - "whitespace_chars": "Zwischenraumzeichen", - "whitespace_chars_desc": "Diese Zeichen werden im Titel durch Zwischenraumzeichen ersetzt" - }, - "scene_tools": "Szenen-Tools" - }, - "ui": { - "abbreviate_counters": { - "description": "Verkürze Zähler in Karten und den Detail-Ansichten ab, zum Beispiel wird \"1831\" als \"1.8K\" abgekürzt.", - "heading": "Zähler verkürzen" - }, - "basic_settings": "Grundeinstellungen", - "custom_css": { - "description": "Die Seite muss neu geladen werden, damit die Änderungen wirksam werden. Es gibt keine Garantie für die Kompatibilität des benutzerdefinierten CSS und zukünftigen Versionen von Stash.", - "heading": "Benutzerdefinierte CSS", - "option_label": "Benutzerdefiniertes CSS aktiviert" - }, - "custom_javascript": { - "description": "Seite muss neu geladen werden, damit die Änderungen wirksam werden. 
Es gibt keine Garantie für die Kompatibilität des benutzerdefinierten Javascript und zukünftigen Versionen von Stash.", - "heading": "Benutzerdefiniertes Javascript", - "option_label": "Benutzerdefiniertes Javascript aktiviert" - }, - "custom_locales": { - "description": "Überschreibe einzelne Locale-Strings. Siehe https://github.com/stashapp/stash/blob/develop/ui/v2.5/src/locales/en-GB.json für die Hauptliste. Die Seite muss neu geladen werden, damit die Änderungen wirksam werden.", - "heading": "Benutzerdefinierte Lokalisierung", - "option_label": "Benutzerdefinierte Lokalisierung aktiviert" - }, - "delete_options": { - "description": "Standardeinstellungen wenn Bilder, Galerien und Szenen gelöscht werden.", - "heading": "Optionen löschen", - "options": { - "delete_file": "Lösche standardmäßig die Dateien", - "delete_generated_supporting_files": "Lösche standardmäßig die generierten Hilfsdateien" - } - }, - "desktop_integration": { - "desktop_integration": "Schreibtisch Integration", - "notifications_enabled": "Benachrichtigungen aktivieren", - "send_desktop_notifications_for_events": "Bei Neuigkeiten Benachrichtigungen auf den Desktop senden", - "skip_opening_browser": "Überspringe Öffnen des Browsers", - "skip_opening_browser_on_startup": "Überspringe automatisches Öffnen des Browsers bei Start" - }, - "editing": { - "disable_dropdown_create": { - "description": "Entferne die Möglichkeit der Erstellung neuer Objekte in der Dropdown-Auswahl", - "heading": "Entferne Dropdown Erstellung" + "custom": "Benutzerdefiniert", + "date": "Datum", + "date_format": "YYYY-MM-DD", + "datetime_format": "YYYY-MM-DD HH:MM", + "death_date": "Todesdatum", + "death_year": "Todesjahr", + "descending": "Absteigend", + "description": "Beschreibung", + "detail": "Detail", + "details": "Details", + "developmentVersion": "Entwicklungsversion", + "dialogs": { + "create_new_entity": "Neues {entity} erstellen", + "delete_alert": "Folgende {count, plural, one {{singularEntity}} other 
{{pluralEntity}}} werden dauerhaft gelöscht:", + "delete_confirm": "Möchten Sie {entityName} wirklich löschen?", + "delete_entity_desc": "{count, plural, one {Möchten Sie {singularEntity} wirklich löschen? Sofern die Datei nicht ebenfalls gelöscht werden soll, wird diese {singularEntity} beim Scannen wieder hinzugefügt.} other {Möchten Sie {pluralEntity} wirklich löschen? Sofern die Dateien nicht ebenfalls gelöscht werden sollen, werden diese {pluralEntity} beim Scannen wieder hinzugefügt.}}", + "delete_entity_simple_desc": "{count, plural, one {Möchten Sie {singularEntity} wirklich löschen?} other {Möchten Sie diese {pluralEntity} wirklich löschen?}}", + "delete_entity_title": "{count, plural, one {Lösche {singularEntity}} other {Lösche {pluralEntity}}}", + "delete_galleries_extra": "…plus allen Bilddateien, die keiner anderen Galerie angehängt sind.", + "delete_gallery_files": "Lösche Galerieordner/zip Datei und alle Bilder, die keiner anderen Galerie angehängt sind.", + "delete_object_desc": "Möchten Sie {count, plural, one {diese {singularEntity}} other {diese {pluralEntity}}} wirklich löschen?", + "delete_object_overflow": "…und {count} other {count, plural, one {{singularEntity}} other {{pluralEntity}}}.", + "delete_object_title": "{count, plural, one {{singularEntity}} other {{pluralEntity}}} löschen", + "dont_show_until_updated": "Bis zum nächsten Update nicht mehr anzeigen", + "edit_entity_title": "Bearbeiten von {count, plural, one {{singularEntity}} other {{pluralEntity}}}", + "export_include_related_objects": "Zugehörige Objekte in den Export einbeziehen", + "export_title": "Export", + "imagewall": { + "direction": { + "column": "Spalten", + "description": "Spalten- oder Reihenlayout.", + "row": "Zeilen" + }, + "margin_desc": "Anzahl der Marge (in Pixeln) um jedes Bild." 
}, - "heading": "Editieren", - "max_options_shown": { - "label": "Maximalanzahl der anzuzeigenden Objekte in Dropdown-Menüs" - }, - "rating_system": { - "star_precision": { - "label": "Präzision der Sternebewertung", - "options": { - "full": "Voll", - "half": "Halb", - "quarter": "Viertel", - "tenth": "Zehntel" - } - }, - "type": { - "label": "Art des Bewertungssystems", - "options": { - "decimal": "Dezimal", - "stars": "Sterne" + "lightbox": { + "delay": "Verzögerung (Sek)", + "display_mode": { + "fit_horizontally": "Horizontal anpassen", + "fit_to_screen": "Vollbild", + "label": "Anzeigemodus", + "original": "Original" + }, + "options": "Optionen", + "page_header": "Seite {page} / {total}", + "reset_zoom_on_nav": "Zoomstufe beim Bildwechsel zurücksetzen", + "scale_up": { + "description": "Skaliere kleinere Bilder auf Bildschirmgröße", + "label": "Skalieren, um zu passen" + }, + "scroll_mode": { + "description": "Halte die Umschalttaste gedrückt, um einen anderen Modus vorübergehend zu verwenden.", + "label": "Scroll-Modus", + "pan_y": "Schwenkung Y", + "zoom": "Zoomen" } - } - } - }, - "funscript_offset": { - "description": "Zeitversatz in Millisekunden für interaktive Skriptwiedergabe.", - "heading": "Funscript Zeitversatz (ms)" - }, - "handy_connection": { - "connect": "Verbinden", - "server_offset": { - "heading": "Server Kompensation" }, - "status": { - "heading": "Handy Verbindungsstatus" + "merge": { + "destination": "Ziel", + "empty_results": "Die Werte der Zielfelder bleiben unverändert.", + "source": "Quelle" }, - "sync": "Synchronisieren" - }, - "handy_connection_key": { - "description": "Handy Verbindungsschlüssel für interaktive Szenen. 
Wenn dieser Schlüssel gesetzt wird, kann Stash aktuellen Szeneinformationen mit handyfeeling.com teilen", - "heading": "Handy Verbindungsschlüssel" - }, - "image_lightbox": { - "heading": "Bild-Lightbox" - }, - "image_wall": { - "direction": "Richtung", - "heading": "Bilderwand", - "margin": "Marge (Pixel)" - }, - "images": { - "heading": "Bilder", - "options": { - "create_image_clips_from_videos": { - "description": "Wenn für die Bibliothek Videos deaktiviert wurden, können Videodateien (Dateien mit Videodateiendungen) als Bild-Clips eingescannt werden.", - "heading": "Scannen von Videoerweiterungen als Bild-Clip" - }, - "write_image_thumbnails": { - "description": "Speichere eilig erstellte Bild-Thumbnails ab", - "heading": "Speichere Bild-Thumbnails" - } - } - }, - "interactive_options": "Interaktive Optionen", - "language": { - "heading": "Sprache" - }, - "max_loop_duration": { - "description": "Maximale Szenendauer, bei der das Video wiederholt wird – 0 zum Deaktivieren", - "heading": "Maximale Schleifendauer" - }, - "menu_items": { - "description": "Anzeigen oder Ausblenden verschiedener Inhaltstypen in der Navigationsleiste", - "heading": "Menüpunkte" - }, - "minimum_play_percent": { - "description": "Der prozentuale Anteil der Zeit, in der eine Szene gespielt werden muss, bevor Abspielen gezählt wird ist erhöht worden.", - "heading": "Mindestabspieldauer (Prozent)" - }, - "performers": { - "options": { - "image_location": { - "description": "Benutzerdefinierter Pfad zu den standardmäßigen Darstellerbildern. 
Leer lassen, um mitgelieferte Darstellerbilder zu verwenden", - "heading": "Benutzerdefinierter Pfad zu Darstellerbildern" - } - } - }, - "preview_type": { - "description": "Konfiguration für Szenenwand", - "heading": "Vorschautyp", - "options": { - "animated": "Animiertes Bild", - "static": "Statisches Bild", - "video": "Video" - } - }, - "scene_list": { - "heading": "Szenenliste", - "options": { - "show_studio_as_text": "Studios als Text anzeigen" - } - }, - "scene_player": { - "heading": "Szenenplayer", - "options": { - "always_start_from_beginning": "Video immer von Anfang an starten", - "auto_start_video": "Video automatisch starten", - "auto_start_video_on_play_selected": { - "description": "Automatischer Start von Videos aus der Warteschlange oder bei einer Wiedergabe von ausgewählten oder zufälligen Videos von der Szenen-Seite", - "heading": "Automatische Wiedergabe von ausgewählten Videos" - }, - "continue_playlist_default": { - "description": "Nächste Szene in der Warteschlange spielen", - "heading": "Standardmäßig die Wiedergabeliste fortsetzen" - }, - "show_scrubber": "Scrubber anzeigen", - "track_activity": "Aktivität verfolgen", - "vr_tag": { - "description": "Der VR-Knopf wird nur für Szenen mit diesem Tag angezeigt.", - "heading": "VR Tag" - } - } - }, - "scene_wall": { - "heading": "Szenen-/Markierungswand", - "options": { - "display_title": "Titel und Tags anzeigen", - "toggle_sound": "Sound einschalten" - } - }, - "scroll_attempts_before_change": { - "description": "Anzahl der Versuche, einen Bildlauf durchzuführen, bevor zum nächsten/vorherigen Element gewechselt wird. 
Gilt nur für den Bildlaufmodus Schwenkung Y.", - "heading": "Anzahl Scroll-Versuche vor Übergang" - }, - "show_tag_card_on_hover": { - "description": "Tag-Karte anzeigen, wenn der Mauszeiger über Tag-Abzeichen bewegt wird", - "heading": "Tag-Karten-Tooltips" - }, - "slideshow_delay": { - "description": "Die Diashow ist in Galerien in der Wandansicht verfügbar", - "heading": "Verzögerung der Diashow (Sekunden)" - }, - "studio_panel": { - "heading": "Studioansicht", - "options": { - "show_child_studio_content": { - "description": "In der Studioansicht, zeige auch Inhalte von Unterstudios", - "heading": "Zeige Inhalte von Unterstudios" - } - } - }, - "tag_panel": { - "heading": "Tag-Ansicht", - "options": { - "show_child_tagged_content": { - "description": "In der Tag-Ansicht, zeige auch Inhalte der Sub-Tags", - "heading": "Zeige Sub-Tag Inhalte" - } - } - }, - "title": "Benutzeroberfläche" - } - }, - "configuration": "Konfiguration", - "countables": { - "files": "{count, plural, one {Datei} other {Dateien}}", - "galleries": "{count, plural, one {Galerie} other {Galerien}}", - "images": "{count, plural, one {Bild} other {Bilder}}", - "markers": "{count, plural, one {Markierung} other {Markierungen}}", - "movies": "{count, plural, one {Film} other {Filme}}", - "performers": "{count, plural, one {Darsteller} other {Darsteller}}", - "scenes": "{count, plural, one {Szene} other {Szenen}}", - "studios": "{count, plural, one {Studio} other {Studios}}", - "tags": "{count, plural, one {Tag} other {Tags}}" - }, - "country": "Land", - "cover_image": "Titelbild", - "created_at": "Erstellt am", - "criterion": { - "greater_than": "Größer als", - "less_than": "Weniger als", - "value": "Wert" - }, - "criterion_modifier": { - "between": "zwischen", - "equals": "ist", - "excludes": "schließt aus", - "format_string": "{criterion} {modifierString} {valueString}", - "format_string_depth": "{criterion} {modifierString} {valueString} (+{depth, plural, =-1 {Alle} other {{Tiefe}}})", - 
"format_string_excludes": "{criterion} {modifierString} {valueString} (ausgenommen {excludedString})", - "format_string_excludes_depth": "{criterion} {modifierString} {valueString} (ausgenommen {excludedString}) (+{depth, plural, =-1 {Alle} other {{Tiefe}}})", - "greater_than": "ist größer als", - "includes": "beinhaltet", - "includes_all": "beinhaltet alles", - "is_null": "ist nichts", - "less_than": "ist weniger als", - "matches_regex": "stimmt mit Regex überein", - "not_between": "nicht zwischen", - "not_equals": "ist nicht", - "not_matches_regex": "stimmt nicht mit Regex überein", - "not_null": "ist nicht nichts" - }, - "custom": "Benutzerdefiniert", - "date": "Datum", - "date_format": "YYYY-MM-DD", - "datetime_format": "YYYY-MM-DD HH:MM", - "death_date": "Todesdatum", - "death_year": "Todesjahr", - "descending": "Absteigend", - "description": "Beschreibung", - "detail": "Detail", - "details": "Details", - "developmentVersion": "Entwicklungsversion", - "dialogs": { - "create_new_entity": "Neues {entity} erstellen", - "delete_alert": "Folgende {count, plural, one {{singularEntity}} other {{pluralEntity}}} werden dauerhaft gelöscht:", - "delete_confirm": "Möchten Sie {entityName} wirklich löschen?", - "delete_entity_desc": "{count, plural, one {Möchten Sie {singularEntity} wirklich löschen? Sofern die Datei nicht ebenfalls gelöscht werden soll, wird diese {singularEntity} beim Scannen wieder hinzugefügt.} other {Möchten Sie {pluralEntity} wirklich löschen? 
Sofern die Dateien nicht ebenfalls gelöscht werden sollen, werden diese {pluralEntity} beim Scannen wieder hinzugefügt.}}", - "delete_entity_simple_desc": "{count, plural, one {Möchten Sie {singularEntity} wirklich löschen?} other {Möchten Sie diese {pluralEntity} wirklich löschen?}}", - "delete_entity_title": "{count, plural, one {Lösche {singularEntity}} other {Lösche {pluralEntity}}}", - "delete_galleries_extra": "…plus allen Bilddateien, die keiner anderen Galerie angehängt sind.", - "delete_gallery_files": "Lösche Galerieordner/zip Datei und alle Bilder, die keiner anderen Galerie angehängt sind.", - "delete_object_desc": "Möchten Sie {count, plural, one {diese {singularEntity}} other {diese {pluralEntity}}} wirklich löschen?", - "delete_object_overflow": "…und {count} other {count, plural, one {{singularEntity}} other {{pluralEntity}}}.", - "delete_object_title": "{count, plural, one {{singularEntity}} other {{pluralEntity}}} löschen", - "dont_show_until_updated": "Bis zum nächsten Update nicht mehr anzeigen", - "edit_entity_title": "Bearbeiten von {count, plural, one {{singularEntity}} other {{pluralEntity}}}", - "export_include_related_objects": "Zugehörige Objekte in den Export einbeziehen", - "export_title": "Export", - "imagewall": { - "direction": { - "column": "Spalten", - "description": "Spalten- oder Reihenlayout.", - "row": "Zeilen" - }, - "margin_desc": "Anzahl der Marge (in Pixeln) um jedes Bild." 
+ "merge_tags": { + "destination": "Ziel", + "source": "Quelle" + }, + "overwrite_filter_confirm": "Möchten Sie die vorhandene gespeicherte Anfrage {entityName} wirklich überschreiben?", + "reassign_entity_title": "{count, plural, one {Weise {singularEntity} neu zu} other {Weise {pluralEntity} neu zu}}}", + "reassign_files": { + "destination": "Neu zuweisen an" + }, + "scene_gen": { + "clip_previews": "Bild-Clip Vorschau", + "covers": "Szene-Cover", + "force_transcodes": "Transcode Erzeugung erzwingen", + "force_transcodes_tooltip": "Standardmäßig werden Transkodierungen nur erzeugt, wenn die Videodatei im Browser nicht unterstützt wird. Wenn diese Option aktiviert ist, werden Transkodierungen auch dann erstellt, wenn die Videodatei vom Browser unterstützt zu werden scheint.", + "image_previews": "Animierte Bildvorschauen", + "image_previews_tooltip": "Animierte WebP-Vorschaubilder, nur erforderlich, wenn der Vorschautyp auf Animiertes Bild eingestellt ist.", + "interactive_heatmap_speed": "Erzeugen von Heatmaps und Geschwindigkeiten für interaktive Szenen", + "marker_image_previews": "Animierte Vorschau für Markierungen", + "marker_image_previews_tooltip": "Animierte WebP-Vorschau für Markierungen, nur erforderlich, wenn der Vorschautyp auf Animiertes Bild eingestellt ist.", + "marker_screenshots": "Screenshots für Markierungen", + "marker_screenshots_tooltip": "Statische JPG-Bilder für Markierungen, nur erforderlich, wenn der Vorschautyp auf Statisches Bild eingestellt ist.", + "markers": "Vorschau für Markierungen", + "markers_tooltip": "20-Sekunden-Videos, die zum angegebenen Zeitpunkt beginnen.", + "override_preview_generation_options": "Überschreibe Optionen zur Erstellung von Vorschauen", + "override_preview_generation_options_desc": "Überschreibe die Optionen zur Erstellung von Vorschauen für diesen Vorgang. 
Die Standardeinstellungen werden unter System -> Vorschau-Generierung festgelegt.",
+ "overwrite": "Vorhandene Dateien überschreiben",
+ "phash": "Perzeptuelle Hashes (zur Deduplizierung)",
+ "preview_exclude_end_time_desc": "Schließen Sie die letzten x Sekunden von der Szenenvorschau aus. Dies kann ein Wert in Sekunden oder ein Prozentsatz (zB 2%) der gesamten Szenendauer sein.",
+ "preview_exclude_end_time_head": "Endzeit ausschließen",
+ "preview_exclude_start_time_desc": "Schließen Sie die ersten x Sekunden von der Szenenvorschau aus. Dies kann ein Wert in Sekunden oder ein Prozentsatz (zB 2%) der gesamten Szenendauer sein.",
+ "preview_exclude_start_time_head": "Startzeit ausschließen",
+ "preview_generation_options": "Optionen für die Erstellung von Vorschauen",
+ "preview_options": "Vorschauoptionen",
+ "preview_preset_desc": "Die Voreinstellung regelt Größe, Qualität und Encoding-Zeit der Vorschaugenerierung. Einstellungen jenseits von „slow“ haben vernachlässigbare Vorteile und werden nicht empfohlen.",
+ "preview_preset_head": "Vorschau-Kodierungseinstellung",
+ "preview_seg_count_desc": "Anzahl der Segmente in Vorschaudateien.",
+ "preview_seg_count_head": "Anzahl der Segmente in der Vorschau",
+ "preview_seg_duration_desc": "Dauer jedes Vorschausegments in Sekunden.",
+ "preview_seg_duration_head": "Vorschau der Segmentdauer",
+ "sprites": "Szenen-Scrubber Sprites",
+ "sprites_tooltip": "Sprites (für den Szenen-Scrubber)",
+ "transcodes": "Transkodierung",
+ "transcodes_tooltip": "MP4-Konvertierung von nicht unterstützten Videoformaten",
+ "video_previews": "Vorschau",
+ "video_previews_tooltip": "Videovorschauen, die abgespielt werden, wenn man den Mauszeiger über eine Szene bewegt"
+ },
+ "scenes_found": "{count} Szenen gefunden",
+ "scrape_entity_query": "{entity_type} Scrape-Abfrage",
+ "scrape_entity_title": "{entity_type} Scrape-Ergebnisse",
+ "scrape_results_existing": "Vorhanden",
+ "scrape_results_scraped": "Gescraped",
+ 
"set_image_url_title": "Bild URL", + "unsaved_changes": "Nicht gespeicherte Änderungen. Bist du sicher dass du die Seite verlassen willst?" }, - "lightbox": { - "delay": "Verzögerung (Sek)", - "display_mode": { - "fit_horizontally": "Horizontal anpassen", - "fit_to_screen": "Vollbild", - "label": "Anzeigemodus", - "original": "Original" - }, - "options": "Optionen", - "page_header": "Seite {page} / {total}", - "reset_zoom_on_nav": "Zoomstufe beim Bildwechsel zurücksetzen", - "scale_up": { - "description": "Skaliere kleinere Bilder auf Bildschirmgröße", - "label": "Skalieren, um zu passen" - }, - "scroll_mode": { - "description": "Halte die Umschalttaste gedrückt, um einen anderen Modus vorübergehend zu verwenden.", - "label": "Scroll-Modus", - "pan_y": "Schwenkung Y", - "zoom": "Zoomen" - } + "dimensions": "Maße", + "director": "Regisseur", + "disambiguation": "Begriffsklärung", + "display_mode": { + "grid": "Gitter", + "list": "Liste", + "tagger": "Tagger", + "unknown": "Unbekannt", + "wall": "Wand" }, - "merge": { - "destination": "Ziel", - "empty_results": "Die Werte der Zielfelder bleiben unverändert.", - "source": "Quelle" + "donate": "Spenden", + "dupe_check": { + "description": "Bei Levels unterhalb von 'Exact' kann die Berechnung länger dauern. 
Bei niedrigeren Genauigkeitsstufen können auch falsch positive Ergebnisse zurückgegeben werden.", + "duration_diff": "Maximale Laufzeitdifferenz", + "duration_options": { + "any": "Jede", + "equal": "Gleich" + }, + "found_sets": "{setCount, plural, one{# Satz von Duplikaten gefunden.} other {# Sätze von Duplikaten gefunden.}}", + "options": { + "exact": "Genau", + "high": "Hoch", + "low": "Niedrig", + "medium": "Mittel" + }, + "search_accuracy_label": "Suchgenauigkeit", + "title": "Szenen-Duplikate" }, - "merge_tags": { - "destination": "Ziel", - "source": "Quelle" + "duplicated_phash": "Dopplung (phash)", + "duration": "Dauer", + "effect_filters": { + "aspect": "Seitenverhältnis", + "blue": "Blau", + "blur": "Unschärfe", + "brightness": "Helligkeit", + "contrast": "Kontrast", + "gamma": "Gamma", + "green": "Grün", + "hue": "Farbton", + "name": "Filter", + "name_transforms": "Transformierung", + "red": "Rot", + "reset_filters": "Filter zurücksetzen", + "reset_transforms": "Transformationen zurücksetzen", + "rotate": "Drehen", + "rotate_left_and_scale": "Nach links drehen und skalieren", + "rotate_right_and_scale": "Nach rechts drehen und skalieren", + "saturation": "Sättigung", + "scale": "Skalieren", + "warmth": "Wärme" }, - "overwrite_filter_confirm": "Möchten Sie die vorhandene gespeicherte Anfrage {entityName} wirklich überschreiben?", - "reassign_entity_title": "{count, plural, one {Weise {singularEntity} neu zu} other {Weise {pluralEntity} neu zu}}}", - "reassign_files": { - "destination": "Neu zuweisen an" + "empty_server": "Fügen Sie Ihrem Server einige Szenen hinzu, um Empfehlungen auf dieser Seite anzuzeigen.", + "errors": { + "image_index_greater_than_zero": "Bilderindex muss größer 0 sein", + "lazy_component_error_help": "Sollten Sie kürzlich ein Update für Stash durchgeführt haben, laden Sie bitte die Seite neu oder löschen Sie den Browser-Cache.", + "something_went_wrong": "Etwas ist schief gelaufen." 
}, - "scene_gen": { - "clip_previews": "Bild-Clip Vorschau", - "covers": "Szene-Cover", - "force_transcodes": "Transcode Erzeugung erzwingen", - "force_transcodes_tooltip": "Standardmäßig werden Transkodierungen nur erzeugt, wenn die Videodatei im Browser nicht unterstützt wird. Wenn diese Option aktiviert ist, werden Transkodierungen auch dann erstellt, wenn die Videodatei vom Browser unterstützt zu werden scheint.", - "image_previews": "Animierte Bildvorschauen", - "image_previews_tooltip": "Animierte WebP-Vorschaubilder, nur erforderlich, wenn der Vorschautyp auf Animiertes Bild eingestellt ist.", - "interactive_heatmap_speed": "Erzeugen von Heatmaps und Geschwindigkeiten für interaktive Szenen", - "marker_image_previews": "Animierte Vorschau für Markierungen", - "marker_image_previews_tooltip": "Animierte WebP-Vorschau für Markierungen, nur erforderlich, wenn der Vorschautyp auf Animiertes Bild eingestellt ist.", - "marker_screenshots": "Screenshots für Markierungen", - "marker_screenshots_tooltip": "Statische JPG-Bilder für Markierungen, nur erforderlich, wenn der Vorschautyp auf Statisches Bild eingestellt ist.", - "markers": "Vorschau für Markierungen", - "markers_tooltip": "20-Sekunden-Videos, die zum angegebenen Zeitpunkt beginnen.", - "override_preview_generation_options": "Überschreibe Optionen zur Erstellung von Vorschauen", - "override_preview_generation_options_desc": "Überschreibe die Optionen zur Erstellung von Vorschauen für diesen Vorgang. Die Standardeinstellungen werden unter System -> \tVorschau-Generierung festgelegt.", - "overwrite": "Vorhandene Dateien überschreiben", - "phash": "Perzeptuelle Hashes (zur Deduplizierung)", - "preview_exclude_end_time_desc": "Schließen Sie die letzten x Sekunden von der Szenenvorschau aus. 
Dies kann ein Wert in Sekunden oder ein Prozentsatz (zB 2%) der gesamten Szenendauer sein.", - "preview_exclude_end_time_head": "Endzeit ausschließen", - "preview_exclude_start_time_desc": "Schließen Sie die ersten x Sekunden von der Szenenvorschau aus. Dies kann ein Wert in Sekunden oder ein Prozentsatz (zB 2%) der gesamten Szenendauer sein.", - "preview_exclude_start_time_head": "Startzeit ausschließen", - "preview_generation_options": "Optionen für die Erstellung von Vorschauen", - "preview_options": "Vorschauoptionen", - "preview_preset_desc": "Die Voreinstellung regelt Größe, Qualität und Encoding-Zeit der Vorschaugenerierung. Einstellungen jenseits von „slow“ haben vernachlässigbare Vorteile und werden nicht empfohlen.", - "preview_preset_head": "Vorschau-Kodierungseinstellung", - "preview_seg_count_desc": "Anzahl der Segmente in Vorschaudateien.", - "preview_seg_count_head": "Anzahl der Segmente in der Vorschau", - "preview_seg_duration_desc": "Dauer jedes Vorschausegments in Sekunden.", - "preview_seg_duration_head": "Vorschau der Segmentdauer", - "sprites": "Szenen-Scrubber Sprites", - "sprites_tooltip": "Sprites (für den Szenen-Scrubber)", - "transcodes": "Transkodierung", - "transcodes_tooltip": "MP4-Konvertierung von nicht unterstützten Videoformaten", - "video_previews": "Vorschau", - "video_previews_tooltip": "Videovorschauen, die abgespielt werden, wenn man den Mauszeiger über eine Szene bewegt" + "ethnicity": "Ethnizität", + "existing_value": "vorhandener Wert", + "eye_color": "Augenfarbe", + "fake_tits": "Brustvergrößerungen", + "false": "Falsch", + "favourite": "Favorit", + "file": "Datei", + "file_count": "Dateianzahl", + "file_info": "Datei", + "file_mod_time": "Dateiänderungszeit", + "files": "Dateien", + "files_amount": "{value} Dateien", + "filesize": "Dateigröße", + "filter": "Filter", + "filter_name": "Filtername", + "filters": "Filter", + "folder": "Ordner", + "framerate": "Bildrate", + "frames_per_second": "{value} Bilder pro Sekunde", + 
"front_page": { + "types": { + "premade_filter": "Vorgefertigte Filter", + "saved_filter": "Gespeicherte Filter" + } }, - "scenes_found": "{count} Szenen gefunden", - "scrape_entity_query": "{entity_type} Scrape-Abfrage", - "scrape_entity_title": "{entity_type} Scrape-Ergebnisse", - "scrape_results_existing": "Vorhanden", - "scrape_results_scraped": "Gescraped", - "set_image_url_title": "Bild URL", - "unsaved_changes": "Nicht gespeicherte Änderungen. Bist du sicher dass du die Seite verlassen willst?" - }, - "dimensions": "Maße", - "director": "Regisseur", - "disambiguation": "Begriffsklärung", - "display_mode": { - "grid": "Gitter", - "list": "Liste", - "tagger": "Tagger", - "unknown": "Unbekannt", - "wall": "Wand" - }, - "donate": "Spenden", - "dupe_check": { - "description": "Bei Levels unterhalb von 'Exact' kann die Berechnung länger dauern. Bei niedrigeren Genauigkeitsstufen können auch falsch positive Ergebnisse zurückgegeben werden.", - "duration_diff": "Maximale Laufzeitdifferenz", - "duration_options": { - "any": "Jede", - "equal": "Gleich" + "galleries": "Galerien", + "gallery": "Galerie", + "gallery_count": "Galerienanzahl", + "gender": "Geschlecht", + "gender_types": { + "FEMALE": "Weiblich", + "INTERSEX": "Intersexuell", + "MALE": "Männlich", + "NON_BINARY": "Nicht-Binär", + "TRANSGENDER_FEMALE": "Trans* weiblich", + "TRANSGENDER_MALE": "Trans* männlich" }, - "found_sets": "{setCount, plural, one{# Satz von Duplikaten gefunden.} other {# Sätze von Duplikaten gefunden.}}", - "options": { - "exact": "Genau", - "high": "Hoch", - "low": "Niedrig", - "medium": "Mittel" + "hair_color": "Haarfarbe", + "handy_connection_status": { + "connecting": "Verbindet", + "disconnected": "Getrennt", + "error": "Fehler bei der Verbindung zu Handy", + "missing": "Fehlt", + "ready": "Bereit", + "syncing": "Synchronisiert mit Server", + "uploading": "Skript wird hochgeladen" }, - "search_accuracy_label": "Suchgenauigkeit", - "title": "Szenen-Duplikate" - }, - 
"duplicated_phash": "Dopplung (phash)", - "duration": "Dauer", - "effect_filters": { - "aspect": "Seitenverhältnis", - "blue": "Blau", - "blur": "Unschärfe", - "brightness": "Helligkeit", - "contrast": "Kontrast", - "gamma": "Gamma", - "green": "Grün", - "hue": "Farbton", - "name": "Filter", - "name_transforms": "Transformierung", - "red": "Rot", - "reset_filters": "Filter zurücksetzen", - "reset_transforms": "Transformationen zurücksetzen", - "rotate": "Drehen", - "rotate_left_and_scale": "Nach links drehen und skalieren", - "rotate_right_and_scale": "Nach rechts drehen und skalieren", - "saturation": "Sättigung", - "scale": "Skalieren", - "warmth": "Wärme" - }, - "empty_server": "Fügen Sie Ihrem Server einige Szenen hinzu, um Empfehlungen auf dieser Seite anzuzeigen.", - "errors": { - "image_index_greater_than_zero": "Bilderindex muss größer 0 sein", - "lazy_component_error_help": "Sollten Sie kürzlich ein Update für Stash durchgeführt haben, laden Sie bitte die Seite neu oder löschen Sie den Browser-Cache.", - "something_went_wrong": "Etwas ist schief gelaufen." 
- }, - "ethnicity": "Ethnizität", - "existing_value": "vorhandener Wert", - "eye_color": "Augenfarbe", - "fake_tits": "Brustvergrößerungen", - "false": "Falsch", - "favourite": "Favorit", - "file": "Datei", - "file_count": "Dateianzahl", - "file_info": "Datei", - "file_mod_time": "Dateiänderungszeit", - "files": "Dateien", - "files_amount": "{value} Dateien", - "filesize": "Dateigröße", - "filter": "Filter", - "filter_name": "Filtername", - "filters": "Filter", - "folder": "Ordner", - "framerate": "Bildrate", - "frames_per_second": "{value} Bilder pro Sekunde", - "front_page": { - "types": { - "premade_filter": "Vorgefertigte Filter", - "saved_filter": "Gespeicherte Filter" - } - }, - "galleries": "Galerien", - "gallery": "Galerie", - "gallery_count": "Galerienanzahl", - "gender": "Geschlecht", - "gender_types": { - "FEMALE": "Weiblich", - "INTERSEX": "Intersexuell", - "MALE": "Männlich", - "NON_BINARY": "Nicht-Binär", - "TRANSGENDER_FEMALE": "Trans* weiblich", - "TRANSGENDER_MALE": "Trans* männlich" - }, - "hair_color": "Haarfarbe", - "handy_connection_status": { - "connecting": "Verbindet", - "disconnected": "Getrennt", - "error": "Fehler bei der Verbindung zu Handy", - "missing": "Fehlt", - "ready": "Bereit", - "syncing": "Synchronisiert mit Server", - "uploading": "Skript wird hochgeladen" - }, - "hasChapters": "Hat Kapitel", - "hasMarkers": "Hat Markierungen", - "height": "Größe", - "height_cm": "Höhe (cm)", - "help": "Hilfe", - "ignore_auto_tag": "Auto-Tag ignorieren", - "image": "Bild", - "image_count": "Bilderanzahl", - "image_index": "Bild #", - "images": "Bilder", - "include_parent_tags": "Übergeordnete Tags einbeziehen", - "include_sub_studios": "Untergeordnete Studios einbeziehen", - "include_sub_tags": "Untergeordnete Tags einbeziehen", - "instagram": "Instagram", - "interactive": "Interaktiv", - "interactive_speed": "Interaktive Geschwindigkeit", - "isMissing": "Fehlt", - "last_played_at": "Zuletzt Abgespielt Am", - "library": "Bibliothek", - 
"loading": { - "generic": "Wird geladen…" - }, - "marker_count": "Anzahl an Markierungen", - "markers": "Markierungen", - "measurements": "Maße", - "media_info": { - "audio_codec": "Audio-Codec", - "checksum": "Prüfsumme", - "downloaded_from": "Heruntergeladen von", - "hash": "Hash", + "hasChapters": "Hat Kapitel", + "hasMarkers": "Hat Markierungen", + "height": "Größe", + "height_cm": "Höhe (cm)", + "help": "Hilfe", + "ignore_auto_tag": "Auto-Tag ignorieren", + "image": "Bild", + "image_count": "Bilderanzahl", + "image_index": "Bild #", + "images": "Bilder", + "include_parent_tags": "Übergeordnete Tags einbeziehen", + "include_sub_studios": "Untergeordnete Studios einbeziehen", + "include_sub_tags": "Untergeordnete Tags einbeziehen", + "instagram": "Instagram", + "interactive": "Interaktiv", "interactive_speed": "Interaktive Geschwindigkeit", - "performer_card": { - "age": "{age} {years_old}", - "age_context": "{age} {years_old} in dieser Szene" + "isMissing": "Fehlt", + "last_played_at": "Zuletzt Abgespielt Am", + "library": "Bibliothek", + "loading": { + "generic": "Wird geladen…" }, - "phash": "PHashwert", - "play_count": "Anzahl Wiedergaben", - "play_duration": "Abspielzeit", - "stream": "Stream", - "video_codec": "Video-Codec" - }, - "megabits_per_second": "{value} Megabit pro Sekunde", - "metadata": "Metadaten", - "movie": "Film", - "movie_scene_number": "Filmszenennummer", - "movies": "Filme", - "name": "Name", - "new": "Neu", - "none": "Keiner", - "o_counter": "O-Zähler", - "operations": "Operationen", - "organized": "Organisiert", - "pagination": { - "first": "Erste", - "last": "Letzte", - "next": "Nächste", - "previous": "Vorherige" - }, - "parent_of": "Übergeordnet von {children}", - "parent_studios": "Übergeordnete Studios", - "parent_tag_count": "Anzahl übergeordneter Tags", - "parent_tags": "Übergeordnete Tags", - "part_of": "Übergeordnet von {parent}", - "path": "Pfad", - "penis": "Penis", - "penis_length": "Penislänge", - "penis_length_cm": 
"Penislänge (cm)", - "perceptual_similarity": "Wahrnehmungsähnlichkeit (phash)", - "performer": "Darsteller", - "performerTags": "Darsteller-Tags", - "performer_age": "Alter der Darsteller", - "performer_count": "Darstelleranzahl", - "performer_favorite": "Darsteller favorisiert", - "performer_image": "Darsteller-Bild", - "performer_tagger": { - "add_new_performers": "Neue Darsteller hinzufügen", - "any_names_entered_will_be_queried": "Alle eingetragenen Namen werden bei der stash-box Instanz nachgeschlagen und hinzugefügt, wenn gefunden. Nur exakte Übereinstimmungen werden als Treffer gewertet.", - "batch_add_performers": "Stapelverarbeitung für Darsteller", - "batch_update_performers": "Stapelverarbeitungsaktualisierung für Darsteller", - "config": { - "active_stash-box_instance": "Ausgewählte stash-box Instanz:", - "edit_excluded_fields": "Ausgeschlossene Felder bearbeiten", - "excluded_fields": "Ausgeschlossene Felder:", - "no_fields_are_excluded": "Keine Felder werden ausgeschlossen", - "no_instances_found": "Keine Instanzen gefunden", - "these_fields_will_not_be_changed_when_updating_performers": "Diese Felder werden durch die Aktualisierung nicht verändert." 
- }, - "current_page": "Aktuelle Seite", - "failed_to_save_performer": "Fehler beim Speichern der Darsteller \"{performer}\"", - "name_already_exists": "Name bereits vergeben", - "network_error": "Netzwerkfehler", - "no_results_found": "Keine Ergebnisse gefunden.", - "number_of_performers_will_be_processed": "{performer_count} Darsteller werden verarbeitet", - "performer_already_tagged": "Darsteller bereits getagged", - "performer_names_separated_by_comma": "Darstellernamen, mit Komma getrennt", - "performer_selection": "Darstellerauswahl", - "performer_successfully_tagged": "Darsteller erfolgreich getagged:", - "query_all_performers_in_the_database": "Alle Darsteller in der Datenbank", - "refresh_tagged_performers": "Aktualisieren getaggter Darsteller", - "refreshing_will_update_the_data": "Bei der Aktualisierung werden die Metadaten aller getaggten Darsteller über die stash-box-Instanz aktualisiert.", - "status_tagging_job_queued": "Status: Tagging-Auftrag in der Warteschlange", - "status_tagging_performers": "Status: Tagge Darsteller", - "tag_status": "Tag Status", - "to_use_the_performer_tagger": "Um den Darsteller-Tagger zu benutzen, muss eine stash-box Instanz konfiguriert sein.", - "untagged_performers": "Nicht getaggte Darsteller", - "update_performer": "Darsteller aktualisieren", - "update_performers": "Darsteller aktualisieren", - "updating_untagged_performers_description": "Bei der Aktualisierung von nicht getaggten Darstellern wird versucht die Metadaten alle Darsteller, welche keine StashID haben, zu aktualisieren." 
- }, - "performers": "Darsteller", - "piercings": "Piercings", - "play_count": "Anzahl der Videowiedergaben", - "play_duration": "Abspiellänge", - "primary_file": "Primäre Datei", - "queue": "Playlist", - "random": "Zufällig", - "rating": "Wertung", - "recently_added_objects": "Kürzlich hinzugefügte {objects}", - "recently_released_objects": "Kürzlich erschienene {objects}", - "release_notes": "Versionshinweise", - "resolution": "Auflösung", - "resume_time": "Zeit fortsetzen", - "scene": "Szene", - "sceneTagger": "Szenen-Tagger", - "sceneTags": "Szenen-Tags", - "scene_code": "Studio Code", - "scene_count": "Szenenanzahl", - "scene_created_at": "Szene angelegt am", - "scene_date": "Datum der Szene", - "scene_id": "Szenen-ID", - "scene_updated_at": "Szene geändert am", - "scenes": "Szenen", - "scenes_updated_at": "Szene aktualisiert am", - "search_filter": { - "edit_filter": "Filter editieren", - "name": "Filter", - "saved_filters": "Gespeicherte Filter", - "update_filter": "Filter aktualisieren" - }, - "second": "Sekunde", - "seconds": "Sekunden", - "settings": "Einstellungen", - "setup": { - "confirm": { - "almost_ready": "Wir sind fast bereit die Konfiguration abzuschließen. Bitte bestätige die folgenden Einstellungen. Du kannst auf Zurück klicken, um etwas Falsches zu ändern. 
Wenn alles gut aussieht, klicke auf Bestätigen, um dein System zu erstellen.", - "blobs_directory": "Binärdaten-Verzeichnis", - "cache_directory": "Cache-Verzeichnis", - "configuration_file_location": "Ort der Konfigurationsdatei:", - "database_file_path": "Dateipfad der Datenbank", - "default_blobs_location": "", - "default_cache_location": "/cache", - "default_db_location": "/stash-go.sqlite", - "default_generated_content_location": "/generated", - "generated_directory": "Ordner der generierten Hilfsdateien", - "nearly_there": "Fast geschafft!", - "stash_library_directories": "Stash Bibliotheks-Ordner" + "marker_count": "Anzahl an Markierungen", + "markers": "Markierungen", + "measurements": "Maße", + "media_info": { + "audio_codec": "Audio-Codec", + "checksum": "Prüfsumme", + "downloaded_from": "Heruntergeladen von", + "hash": "Hash", + "interactive_speed": "Interaktive Geschwindigkeit", + "performer_card": { + "age": "{age} {years_old}", + "age_context": "{age} {years_old} in dieser Szene" + }, + "phash": "PHashwert", + "play_count": "Anzahl Wiedergaben", + "play_duration": "Abspielzeit", + "stream": "Stream", + "video_codec": "Video-Codec" }, - "creating": { - "creating_your_system": "Erstelle dein System", - "ffmpeg_notice": "Wenn ffmpeg nicht in deinen Pfaden existiert, warte bitte während Stash es für dich herunterlädt. Der Downloadfortschritt wird im Konsolen-Output ausgegeben." + "megabits_per_second": "{value} Megabit pro Sekunde", + "metadata": "Metadaten", + "movie": "Film", + "movie_scene_number": "Filmszenennummer", + "movies": "Filme", + "name": "Name", + "new": "Neu", + "none": "Keiner", + "o_counter": "O-Zähler", + "operations": "Operationen", + "organized": "Organisiert", + "pagination": { + "first": "Erste", + "last": "Letzte", + "next": "Nächste", + "previous": "Vorherige" }, - "errors": { - "something_went_wrong": "Oh nein! 
Etwas ist schief gelaufen!", - "something_went_wrong_description": "Es sieht so aus, als gäbe es Probleme mit deinen Eingaben, klicke Zurück und repariere sie. Falls du nicht weißt was du falsch gemacht hast, helfen wir gerne auf {discordLink}. Solltest du dir sicher sein einen Bug gefunden zu haben, schau doch mal auf {githubLink} vorbei.", - "something_went_wrong_while_setting_up_your_system": "Etwas lief bei der Erstellung des Systems falsch. Hier ist die Fehlermeldung: {error}" + "parent_of": "Übergeordnet von {children}", + "parent_studios": "Übergeordnete Studios", + "parent_tag_count": "Anzahl übergeordneter Tags", + "parent_tags": "Übergeordnete Tags", + "part_of": "Übergeordnet von {parent}", + "path": "Pfad", + "penis": "Penis", + "penis_length": "Penislänge", + "penis_length_cm": "Penislänge (cm)", + "perceptual_similarity": "Wahrnehmungsähnlichkeit (phash)", + "performer": "Darsteller", + "performer_tags": "Darsteller-Tags", + "performer_age": "Alter der Darsteller", + "performer_count": "Darstelleranzahl", + "performer_favorite": "Darsteller favorisiert", + "performer_image": "Darsteller-Bild", + "performer_tagger": { + "add_new_performers": "Neue Darsteller hinzufügen", + "any_names_entered_will_be_queried": "Alle eingetragenen Namen werden bei der stash-box Instanz nachgeschlagen und hinzugefügt, wenn gefunden. Nur exakte Übereinstimmungen werden als Treffer gewertet.", + "batch_add_performers": "Stapelverarbeitung für Darsteller", + "batch_update_performers": "Stapelverarbeitungsaktualisierung für Darsteller", + "config": { + "active_stash-box_instance": "Ausgewählte stash-box Instanz:", + "edit_excluded_fields": "Ausgeschlossene Felder bearbeiten", + "excluded_fields": "Ausgeschlossene Felder:", + "no_fields_are_excluded": "Keine Felder werden ausgeschlossen", + "no_instances_found": "Keine Instanzen gefunden", + "these_fields_will_not_be_changed_when_updating_performers": "Diese Felder werden durch die Aktualisierung nicht verändert." 
+ }, + "current_page": "Aktuelle Seite", + "failed_to_save_performer": "Fehler beim Speichern der Darsteller \"{performer}\"", + "name_already_exists": "Name bereits vergeben", + "network_error": "Netzwerkfehler", + "no_results_found": "Keine Ergebnisse gefunden.", + "number_of_performers_will_be_processed": "{performer_count} Darsteller werden verarbeitet", + "performer_already_tagged": "Darsteller bereits getagged", + "performer_names_separated_by_comma": "Darstellernamen, mit Komma getrennt", + "performer_selection": "Darstellerauswahl", + "performer_successfully_tagged": "Darsteller erfolgreich getagged:", + "query_all_performers_in_the_database": "Alle Darsteller in der Datenbank", + "refresh_tagged_performers": "Aktualisieren getaggter Darsteller", + "refreshing_will_update_the_data": "Bei der Aktualisierung werden die Metadaten aller getaggten Darsteller über die stash-box-Instanz aktualisiert.", + "status_tagging_job_queued": "Status: Tagging-Auftrag in der Warteschlange", + "status_tagging_performers": "Status: Tagge Darsteller", + "tag_status": "Tag Status", + "to_use_the_performer_tagger": "Um den Darsteller-Tagger zu benutzen, muss eine stash-box Instanz konfiguriert sein.", + "untagged_performers": "Nicht getaggte Darsteller", + "update_performer": "Darsteller aktualisieren", + "update_performers": "Darsteller aktualisieren", + "updating_untagged_performers_description": "Bei der Aktualisierung von nicht getaggten Darstellern wird versucht die Metadaten alle Darsteller, welche keine StashID haben, zu aktualisieren." 
}, - "folder": { - "file_path": "Dateipfad", - "up_dir": "Ein Verzeichnis hoch" + "performers": "Darsteller", + "piercings": "Piercings", + "play_count": "Anzahl der Videowiedergaben", + "play_duration": "Abspiellänge", + "primary_file": "Primäre Datei", + "queue": "Playlist", + "random": "Zufällig", + "rating": "Wertung", + "recently_added_objects": "Kürzlich hinzugefügte {objects}", + "recently_released_objects": "Kürzlich erschienene {objects}", + "release_notes": "Versionshinweise", + "resolution": "Auflösung", + "resume_time": "Zeit fortsetzen", + "scene": "Szene", + "sceneTagger": "Szenen-Tagger", + "scene_tags": "Szenen-Tags", + "scene_code": "Studio Code", + "scene_count": "Szenenanzahl", + "scene_created_at": "Szene angelegt am", + "scene_date": "Datum der Szene", + "scene_id": "Szenen-ID", + "scene_updated_at": "Szene geändert am", + "scenes": "Szenen", + "scenes_updated_at": "Szene aktualisiert am", + "search_filter": { + "edit_filter": "Filter editieren", + "name": "Filter", + "saved_filters": "Gespeicherte Filter", + "update_filter": "Filter aktualisieren" }, - "github_repository": "Github Repository", - "migrate": { - "backup_database_path_leave_empty_to_disable_backup": "Backup Datenbank Pfad (Leer lassen, um Backups aus zu schalten):", - "backup_recommended": "Es wird dringend empfohlen ein Backup deiner Datenbank vor der Migration anzufertigen. Wir können das für dich erledigen, indem wir eine Kopie deiner Datenbank in {defaultBackupPath} anfertigen.", - "migrating_database": "Migration der Datenbank im Gange", - "migration_failed": "Migration der Datenbank fehlgeschlagen", - "migration_failed_error": "Der folgende Fehler ist bei der Migration der Datenbank aufgetreten:", - "migration_failed_help": "Bitte führe nötige Korrekturen durch und probiere es erneut. Falls du nicht weißt was du falsch gemacht hast, helfen wir gerne auf {discordLink}. 
Solltest du dir sicher sein einen Bug gefunden zu haben, schau doch mal auf {githubLink} vorbei.", - "migration_irreversible_warning": "Der Migrationsprozess des Datenbankschemas ist irreversibel. Nachdem sie ausgeführt wurde, ist deine Datenbank inkompatibel mit älteren Versionen von Stash.", - "migration_notes": "Anmerkungen zur Migration", - "migration_required": "Migration nötig", - "perform_schema_migration": "Führe Migration des Datenbankschemas durch", - "schema_too_old": "Das Schema deiner aktuellen Stash-Datenbank ist Version {databaseSchema} und muss auf Version {appSchema} migriert werden. Die aktuelle Version von Stash wird nicht ohne Migration der Datenbank funktionieren können. Wenn Sie nicht migrieren möchten, müssen Sie ein Downgrade auf eine Version durchführen, die Ihrem Datenbankschema entspricht." + "second": "Sekunde", + "seconds": "Sekunden", + "settings": "Einstellungen", + "setup": { + "confirm": { + "almost_ready": "Wir sind fast bereit die Konfiguration abzuschließen. Bitte bestätige die folgenden Einstellungen. Du kannst auf Zurück klicken, um etwas Falsches zu ändern. Wenn alles gut aussieht, klicke auf Bestätigen, um dein System zu erstellen.", + "blobs_directory": "Binärdaten-Verzeichnis", + "cache_directory": "Cache-Verzeichnis", + "configuration_file_location": "Ort der Konfigurationsdatei:", + "database_file_path": "Dateipfad der Datenbank", + "default_blobs_location": "", + "default_cache_location": "/cache", + "default_db_location": "/stash-go.sqlite", + "default_generated_content_location": "/generated", + "generated_directory": "Ordner der generierten Hilfsdateien", + "nearly_there": "Fast geschafft!", + "stash_library_directories": "Stash Bibliotheks-Ordner" + }, + "creating": { + "creating_your_system": "Erstelle dein System", + "ffmpeg_notice": "Wenn ffmpeg nicht in deinen Pfaden existiert, warte bitte während Stash es für dich herunterlädt. Der Downloadfortschritt wird im Konsolen-Output ausgegeben." 
+ }, + "errors": { + "something_went_wrong": "Oh nein! Etwas ist schief gelaufen!", + "something_went_wrong_description": "Es sieht so aus, als gäbe es Probleme mit deinen Eingaben, klicke Zurück und repariere sie. Falls du nicht weißt was du falsch gemacht hast, helfen wir gerne auf {discordLink}. Solltest du dir sicher sein einen Bug gefunden zu haben, schau doch mal auf {githubLink} vorbei.", + "something_went_wrong_while_setting_up_your_system": "Etwas lief bei der Erstellung des Systems falsch. Hier ist die Fehlermeldung: {error}" + }, + "folder": { + "file_path": "Dateipfad", + "up_dir": "Ein Verzeichnis hoch" + }, + "github_repository": "Github Repository", + "migrate": { + "backup_database_path_leave_empty_to_disable_backup": "Backup Datenbank Pfad (Leer lassen, um Backups aus zu schalten):", + "backup_recommended": "Es wird dringend empfohlen ein Backup deiner Datenbank vor der Migration anzufertigen. Wir können das für dich erledigen, indem wir eine Kopie deiner Datenbank in {defaultBackupPath} anfertigen.", + "migrating_database": "Migration der Datenbank im Gange", + "migration_failed": "Migration der Datenbank fehlgeschlagen", + "migration_failed_error": "Der folgende Fehler ist bei der Migration der Datenbank aufgetreten:", + "migration_failed_help": "Bitte führe nötige Korrekturen durch und probiere es erneut. Falls du nicht weißt was du falsch gemacht hast, helfen wir gerne auf {discordLink}. Solltest du dir sicher sein einen Bug gefunden zu haben, schau doch mal auf {githubLink} vorbei.", + "migration_irreversible_warning": "Der Migrationsprozess des Datenbankschemas ist irreversibel. 
Nachdem sie ausgeführt wurde, ist deine Datenbank inkompatibel mit älteren Versionen von Stash.", + "migration_notes": "Anmerkungen zur Migration", + "migration_required": "Migration nötig", + "perform_schema_migration": "Führe Migration des Datenbankschemas durch", + "schema_too_old": "Das Schema deiner aktuellen Stash-Datenbank ist Version {databaseSchema} und muss auf Version {appSchema} migriert werden. Die aktuelle Version von Stash wird nicht ohne Migration der Datenbank funktionieren können. Wenn Sie nicht migrieren möchten, müssen Sie ein Downgrade auf eine Version durchführen, die Ihrem Datenbankschema entspricht." + }, + "paths": { + "database_filename_empty_for_default": "Datenbank-Dateiname (Leer für Standardwert)", + "description": "Als nächstes müssen wir festhalten wo wir deine Porno-Kollektion finden können und wo wir unsere Datenbank, generierten Hilfsdateien und Cache speichern dürfen. Diese Einstellungen lassen sich später auch noch ändern.", + "path_to_cache_directory_empty_for_default": "Pfad zum Cache-Verzeichnis (leer für Voreinstellung)", + "path_to_generated_directory_empty_for_default": "Pfad zum Ordner der Hilfsdateien (Leer für Standardwert)", + "set_up_your_paths": "Setze die Dateipfade", + "stash_alert": "Es wurde kein Bibliotheks-Pfad gesetzt. Somit werden keine Dateien in Stash eingescannt. Bist du dir sicher?", + "where_can_stash_store_blobs": "Wo darf Stash die Binärdaten-Blobs speichern?", + "where_can_stash_store_blobs_description": "Stash kann Binärdaten wie Szene-Cover, Darsteller-, Studio- und Tag-Bilder entweder in der Datenbank oder auf dem Dateisystem speichern. Als Voreinstellung wird Stash ein Verzeichnis blobs im Ordner erstellen in dem auch die Konfigurationsdatei gespeichert ist. Wenn Sie dies ändern möchten, geben Sie bitte einen absoluten oder relativen Pfad an. 
Stash wird dieses Verzeichnis für Sie erstellen, sollte es nicht bereits existieren.", + "where_can_stash_store_blobs_description_addendum": "Wenn Sie alternativ die Daten in der Datenbank speichern wollen, dann lassen Sie das Feld leer. Notiz: Dies wird die Datenbank stark vergrößern und Migrierungsaufgaben werden länger dauern.", + "where_can_stash_store_cache_files": "Wo darf Stash Cache-Dateien zwischenspeichern?", + "where_can_stash_store_cache_files_description": "Um einige Funktionen wie HLS/DASH Live-Transkodierung zu nutzen, muss Stash über ein Cache-Verzeichnis als temporären Zwischenspeicher verfügen. Als Voreinstellung wird Stash ein Verzeichnis cache im Ordner erstellen in dem auch die Konfigurationsdatei gespeichert ist. Wenn Sie dies ändern möchten, geben Sie bitte einen absoluten oder relativen Pfad an. Stash wird dieses Verzeichnis für Sie erstellen, sollte es nicht bereits existieren.", + "where_can_stash_store_its_database": "Wo darf Stash seine Datenbank abspeichern?", + "where_can_stash_store_its_database_description": "Stash nutzt eine SQLite-Datenbank, um Metadaten über deine Pornosammlung zu speichern. Standardmäßig wird diese als stash-go.sqlite in dem Ordner gespeichert, in dem auch deine Konfigurationsdatei liegt. Wenn du das ändern möchtest, gebe bitte einen absoluten oder relativen (gegenüber der aktuellen working directory) Pfad mit Dateinamen an.", + "where_can_stash_store_its_database_warning": "ACHTUNG: Ein Speicherort abseits des Systems auf dem Stash ausgeführt wird (z.B. speichern der Datenbank auf einem Netzwerkspeicher während Stash auf einem anderen Computer ausgeführt wird) ist nicht unterstützt! 
SQLite ist nicht für Nutzung über das Netzwerk ausgelegt und der Versuch, dies zu tun, kann sehr leicht dazu führen, dass Ihre gesamte Datenbank beschädigt wird.", + "where_can_stash_store_its_generated_content": "Wo darf Stash seine generierten Hilfsdateien abspeichern?", + "where_can_stash_store_its_generated_content_description": "Um Thumbnails, Previews und Sprites zur Verfügung zu stellen, generiert Stash diese aus deinen Videos und Bildern. Das schließt auch Transkodierungen von nicht unterstützten Dateiformaten mit ein. Standardmäßig wird Stash diese im Ordner generated abspeichern, der sich am Ort der Konfigurationsdatei befindet. Wenn du das ändern möchtest, gebe bitte einen absoluten oder relativen (gegenüber der aktuellen working directory) Pfad an. Stash wird den Ordner erstellen, sollte er noch nicht existieren.", + "where_is_your_porn_located": "Wo finden wir deine Porno-Kollektion?", + "where_is_your_porn_located_description": "Füge Ordner hinzu in denen sich deine Porno-Videos und -Bilder befinden. Stash wird diese Ordner nutzen, um Videos und Bilder in das System einzupflegen." + }, + "stash_setup_wizard": "Einrichtungshelfer für Stash", + "success": { + "getting_help": "Hilfe", + "help_links": "Solltest du Probleme , Fragen oder Anregungen haben, öffne gerne eine issue auf {githubLink} oder teile sie der Community auf {discordLink} mit.", + "in_app_manual_explained": "Du wirst angehalten das In-App-Benutzerhandbuch aufzusuchen, welches du über den {icon}-Icon in der oberen rechten Ecke findest", + "next_config_step_one": "Wir bringen dich als nächstes zu den Optionen von Stash. Diese Seiten erlauben es dir zu bestimmen, welche Dateien du einpflegen möchtest oder eben auch nicht, einen Benutzernamen und Passwort anzulegen um dein System zu schützen und haben außerdem noch viele weitere Optionen.", + "next_config_step_two": "Wenn du mit deinen Angaben zufrieden bist, kannst du anfangen, indem du Stash deine Dateien einpflegen lässt. 
Dazu klicke zunächst auf {localized_task} und dann auf {localized_scan}.", + "open_collective": "Schau doch mal auf unserer {open_collective_link} vorbei, um herauszufinden, wie du zu der fortwährenden Entwicklung von Stash beitragen kannst.", + "support_us": "Unterstütze uns", + "thanks_for_trying_stash": "Danke fürs Ausprobieren von Stash!", + "welcome_contrib": "Außerdem sind Beiträge in Form von Code (Bug-Fixes, Verbesserungen, Features), Tests, Bug-Reports, Ideen für Features und Verbesserungen, sowie User-Support immer willkommen. Details dazu im entsprechenden Kapitel des In-App-Benutzerhandbuchs.", + "your_system_has_been_created": "Geschafft! Dein System wurde erstellt!" + }, + "welcome": { + "config_path_logic_explained": "Stash versucht zunächst seine Konfigurationsdatei (config.yml) in dem aktuellen Arbeitsverzeichnis zu finden, wenn das nicht gelingt fällt es auf $HOME/.stash/config.yml (bei Windows ist das %USERPROFILE%\\.stash\\config.yml) zurück. Du kannst Stash auch einen Pfad beim Start durch die Kommandozeilen-Option -c '' or --config '' vorgeben.", + "in_current_stash_directory": "Im Verzeichnis $HOME/.stash", + "in_the_current_working_directory": "Im aktuellen Arbeitsverzeichnis", + "next_step": "Nachdem das alles aus dem Weg ist, sind wir jetzt bereit ein neues System zu erstellen. Wähle dazu zunächst aus wo du die Konfigurationsdatei speichern möchtest und klicke auf Weiter.", + "store_stash_config": "Wo möchtest du die Stash Konfigurationsdatei speichern?", + "unable_to_locate_config": "Wenn du das hier liest, konnte Stash keine existierende Konfiguration finden. Dieser Wizard wird dich deshalb durch den Prozess führen, eine neue Konfiguration anzulegen.", + "unexpected_explained": "Wenn du diesen Wizard nicht erwartest, starte Stash im korrekten Arbeitsverzeichnis neu oder setze den Pfad zur Konfigurationsdatei mit der Kommandozeilenoption -c." 
+ }, + "welcome_specific_config": { + "config_path": "Stash wird den folgenden Pfad für die Konfigurationsdatei verwenden: {path}", + "next_step": "Wenn du bereit bist ein neues System anzulegen, klicke Weiter.", + "unable_to_locate_specified_config": "Wenn du das hier liest, konnte Stash die Konfigurationsdatei, welche spezifiziert wurde, nicht finden. Dieser Wizard wird dich deshalb durch den Prozess führen, eine neue Konfiguration anzulegen." + }, + "welcome_to_stash": "Willkommen zu Stash" }, - "paths": { - "database_filename_empty_for_default": "Datenbank-Dateiname (Leer für Standardwert)", - "description": "Als nächstes müssen wir festhalten wo für deine Porno-Kollektion finden können und wo wir unsere Datenbank, generierten Hilfsdateien und Cache speichern dürfen. Diese Einstellungen lassen sich später auch noch ändern.", - "path_to_blobs_directory_empty_for_database": "Pfad zum Binärblob-Verzeichnis (leer um die Datenbank dafür zu nutzen)", - "path_to_cache_directory_empty_for_default": "Pfad zum Cache-Verzeichnis (leer für Voreinstellung)", - "path_to_generated_directory_empty_for_default": "Pfad zum Ordner der Hilfsdateien (Leer für Standardwert)", - "set_up_your_paths": "Setze die Dateipfade", - "stash_alert": "Es wurde kein Bibliotheks-Pfad gesetzt. Somit werden keine Dateien in Stash eingescannt. Bist du dir sicher?", - "where_can_stash_store_blobs": "Wo darf Stash die Binärdaten-Blobs speichern?", - "where_can_stash_store_blobs_description": "Stash kann Binärdaten wie Szene-Cover, Darsteller-, Studio- und Tag-Bilder entweder in der Datenbank oder auf dem Dateisystem speichern. Als Voreinstellung wird Stash ein Verzeichnis blobs im Ordner erstellen in dem auch die Konfigurationsdatei gespeichert ist. Wenn Sie dies ändern möchten, geben Sie bitte einen absoluten oder relativen Pfad an. 
Stash wird dieses Verzeichnis für Sie erstellen, sollte es nicht bereits existieren.", - "where_can_stash_store_blobs_description_addendum": "Wenn Sie alternativ die Daten in der Datenbank speichern wollen, dann lassen Sie das Feld leer. Notiz: Dies wird die Datenbank start vergrößern und Migrierungsaufgaben werden länger dauern.", - "where_can_stash_store_cache_files": "Wo darf Stash Cache-Dateien zwischenspeichern?", - "where_can_stash_store_cache_files_description": "Um einige Funktionen wie HLS/DASH Live-Transkodierung zu nutzen, muss Stash über ein Cache-Verzeichnis als temporären Zwischenspeicher verfügen. Als Voreinstellung wird Stash ein Verzeichnis cache im Ordner erstellen in dem auch die Konfigurationsdatei gespeichert ist. Wenn Sie dies ändern möchten, geben Sie bitte einen absoluten oder relativen Pfad an. Stash wird dieses Verzeichnis für Sie erstellen, sollte es nicht bereits existieren.", - "where_can_stash_store_its_database": "Wo darf Stash seine Datenbank abspeichern?", - "where_can_stash_store_its_database_description": "Stash nutzt eine SQLite-Datenbank, um Metadaten über deine Pornosammlung zu speichern. Standardmäßig wird diese als stash-go.sqlite in dem Ordner gespeichert, in dem auf deine Konfigurationsdatei liegt. Wenn du das ändern möchtest, gebe bitte einen absoluten oder relativen (gegenüber der aktuellen working directory) Pfad mit Dateinamen an.", - "where_can_stash_store_its_database_warning": "ACHTUNG: Ein Speicherort abseits des Systems auf dem Stash ausgeführt wird (z.B. speichern der Datenbank auf einem Netzwerkspeicher während Stash auf einem anderen Computer ausgeführt wird) ist nicht unterstützt! 
SQLite ist nicht für Nutzung über das Netzwerk ausgelegt und der Versuch, dies zu tun, kann sehr leicht dazu führen, dass Ihre gesamte Datenbank beschädigt wird.", - "where_can_stash_store_its_generated_content": "Wo darf Stash seine generierten Hilfsdateien abspeichern?", - "where_can_stash_store_its_generated_content_description": "Um Thumbnails, Previews und Sprites zur Verfügung zu stellen, generiert Stash diese aus deinen Videos und Bildern. Das schließt auch Transkodierungen von nicht unterstützten Dateiformaten mit ein. Standardmäßig wird Stash diese im Ordner generated abspeichern, der sich am Ort der Konfigurationsdatei befindet. Wenn du das ändern möchtest, gebe bitte einen absoluten oder relativen (gegenüber der aktuellen working directory) Pfad an. Stash wird den Ordner erstellen, sollte er noch nicht existieren.", - "where_is_your_porn_located": "Wo finden wir deine Porno-Kollektion?", - "where_is_your_porn_located_description": "Füge Ordner hinzu in denen sich deine Porno-Videos und -Bilder befinden. Stash wird diese Ordner nutzen, um Videos und Bilder in das System einzupflegen." 
+ "stash_id": "Stash-ID", + "stash_id_endpoint": "Stash ID Endpunkt", + "stash_ids": "Stash IDs", + "stashbox": { + "go_review_draft": "Gehe zu {endpoint_name}, um Entwurf zu begutachten.", + "selected_stash_box": "Ausgewählter Stash-Box Endpunkt", + "submission_failed": "Einreichen fehlgeschlagen", + "submission_successful": "Einreichen erfolgreich", + "submit_update": "Existiert bereits in {endpoint_name}" }, - "stash_setup_wizard": "Einrichtungshelfer für Stash", - "success": { - "getting_help": "Hilfe", - "help_links": "Solltest du Probleme , Fragen oder Anregungen haben, öffne gerne eine issue auf {githubLink} oder teile sie der Community auf {discordLink} mit.", - "in_app_manual_explained": "Du wirst angehalten das In-App-Benutzerhandbuch aufzusuchen, welches du über den {icon}-Icon in der oberen rechten Ecke findest", - "next_config_step_one": "Wir bringen dich als nächstes zu den Optionen von Stash. Diese Seiten erlauben es dir zu bestimmen, welche Dateien du einpflegen möchtest oder eben auch nicht, einen Benutzernamen und Passwort anzulegen um dein System zu schützen und haben außerdem noch viele weitere Optionen.", - "next_config_step_two": "Wenn du mit deinen Angaben zufrieden bist, kannst du anfangen, indem du Stash deine Dateien einpflegen lässt. Dazu klicke zunächst auf {localized_task} und dann auf {localized_scan}.", - "open_collective": "Schau doch mal auf unserer {open_collective_link} vorbei, um herauszufinden, wie du zu der fortwährenden Entwicklung von Stash beitragen kannst.", - "support_us": "Unterstütze uns", - "thanks_for_trying_stash": "Danke fürs Ausprobieren von Stash!", - "welcome_contrib": "Außerdem sind Beiträge in Form von Code (Bug-Fixes, Verbesserungen, Features), Tests, Bug-Reports, Ideen für Features und Verbesserungen, sowie User-Support immer willkommen. Details dazu im entsprechenden Kapitel des In-App-Benutzerhandbuchs.", - "your_system_has_been_created": "Geschafft! Dein System wurde erstellt!" 
+ "statistics": "Statistiken", + "stats": { + "image_size": "Bildspeicher", + "scenes_duration": "Szenendauer", + "scenes_size": "Szenenspeicher" }, - "welcome": { - "config_path_logic_explained": "Stash versucht zunächst seine Konfigurationsdatei (config.yml) in dem aktuellen Arbeitsverzeichnis zu finden, wenn das nicht gelingt fällt es auf $HOME/.stash/config.yml (bei Windows ist das %USERPROFILE%\\.stash\\config.yml) zurück. Du kannst Stash auch einen Pfad beim Start durch die Kommandozeilen-Option -c '' or --config '' vorgeben.", - "in_current_stash_directory": "Im Verzeichnis $HOME/.stash", - "in_the_current_working_directory": "Im aktuellen Arbeitsverzeichnis", - "next_step": "Nachdem das alles aus dem Weg ist, sind wir jetzt bereit ein neues System zu erstellen. Wähle dazu zunächst aus wo du die Konfigurationsdatei speichern möchtest und klicke auf Weiter.", - "store_stash_config": "Wo möchtest du die Stash Konfigurationsdatei speichern?", - "unable_to_locate_config": "Wenn du das hier liest, konnte Stash keine existierende Konfiguration finden. Dieser Wizard wird dich deshalb durch den Prozess führen, eine neue Konfiguration anzulegen.", - "unexpected_explained": "Wenn du diesen Wizard nicht erwartest, starte Stash im korrekten Arbeitsverzeichnis neu oder setze den Pfad zur Konfigurationsdatei mit der Kommandozeilenoption -c." 
+ "status": "Status: {statusText}", + "studio": "Studio", + "studio_depth": "Ebenen (leer für alle)", + "studios": "Studios", + "sub_tag_count": "Anzahl an untergeordneten Tags", + "sub_tag_of": "Sub-Tag von {parent}", + "sub_tags": "Untergeordnete Tags", + "subsidiary_studios": "Untergeordnete Studios", + "synopsis": "Zusammenfassung", + "tag": "Tag", + "tag_count": "Tag-Anzahl", + "tags": "Tags", + "tattoos": "Tätowierungen", + "title": "Titel", + "toast": { + "added_entity": "{count, plural, one {{singularEntity}} other {{pluralEntity}}} hinzugefügt", + "added_generation_job_to_queue": "Generierungsaufgabe zur Warteschlange hinzugefügt", + "created_entity": "{entity} erstellt", + "default_filter_set": "Standardfiltersatz", + "delete_past_tense": "{count, plural, one {{singularEntity}} other {{pluralEntity}}} gelöscht", + "generating_screenshot": "Screenshot wird erstellt…", + "image_index_too_large": "Fehler: Bild-Index ist größer als die Anzahl der Bilder der Gallerie", + "merged_scenes": "Zusammengefasste Szene", + "merged_tags": "Zusammengeführte Tags", + "reassign_past_tense": "Datei neu zugewiesen", + "removed_entity": "{count, plural, one {{singularEntity}} other {{pluralEntity}}} gelöscht", + "rescanning_entity": "Erneutes Scannen von {count, plural, one {{singularEntity}} other {{pluralEntity}}}…", + "saved_entity": "{entity} gespeichert", + "started_auto_tagging": "Automatisches Tagging gestartet", + "started_generating": "Generierung gestartet", + "started_importing": "Import gestartet", + "updated_entity": "{entity} aktualisiert" }, - "welcome_specific_config": { - "config_path": "Stash wird den folgenden Pfad für die Konfigurationsdatei verwenden: {path}", - "next_step": "Wenn du bereit bist ein neues System anzulegen, klicke Weiter.", - "unable_to_locate_specified_config": "Wenn du das hier liest, konnte Stash die Konfigurationsdatei, welche spezifiziert wurde, nicht finden. 
Dieser Wizard wird dich deshalb durch den Prozess führen, eine neue Konfiguration anzulegen." + "total": "Gesamt", + "true": "Wahr", + "twitter": "Twitter", + "type": "Typ", + "updated_at": "Aktualisiert am", + "url": "URL", + "validation": { + "aliases_must_be_unique": "Aliase müssen einzigartig sein", + "date_invalid_form": "${path} muss die Form YYYY-MM-DD haben", + "required": "${path} ist ein notwendiges Feld" }, - "welcome_to_stash": "Willkommen zu Stash" - }, - "stash_id": "Stash-ID", - "stash_id_endpoint": "Stash ID Endpunkt", - "stash_ids": "Stash IDs", - "stashbox": { - "go_review_draft": "Gehe zu {endpoint_name}, um Entwurf zu begutachten.", - "selected_stash_box": "Ausgewählter Stash-Box Endpunkt", - "submission_failed": "Einreichen fehlgeschlagen", - "submission_successful": "Einreichen erfolgreich", - "submit_update": "Existiert bereits in {endpoint_name}" - }, - "statistics": "Statistiken", - "stats": { - "image_size": "Bildspeicher", - "scenes_duration": "Szenendauer", - "scenes_size": "Szenenspeicher" - }, - "status": "Status: {statusText}", - "studio": "Studio", - "studio_depth": "Ebenen (leer für alle)", - "studios": "Studios", - "sub_tag_count": "Anzahl an untergeordneten Tags", - "sub_tag_of": "Sub-Tag von {parent}", - "sub_tags": "Untergeordnete Tags", - "subsidiary_studios": "Untergeordnete Studios", - "synopsis": "Zusammenfassung", - "tag": "Tag", - "tag_count": "Tag-Anzahl", - "tags": "Tags", - "tattoos": "Tätowierungen", - "title": "Titel", - "toast": { - "added_entity": "{count, plural, one {{singularEntity}} other {{pluralEntity}}} hinzugefügt", - "added_generation_job_to_queue": "Generierungsaufgabe zur Warteschlange hinzugefügt", - "created_entity": "{entity} erstellt", - "default_filter_set": "Standardfiltersatz", - "delete_past_tense": "{count, plural, one {{singularEntity}} other {{pluralEntity}}} gelöscht", - "generating_screenshot": "Screenshot wird erstellt…", - "image_index_too_large": "Fehler: Bild-Index ist größer als die 
Anzahl der Bilder der Gallerie", - "merged_scenes": "Zusammengefasste Szene", - "merged_tags": "Zusammengeführte Tags", - "reassign_past_tense": "Datei neu zugewiesen", - "removed_entity": "{count, plural, one {{singularEntity}} other {{pluralEntity}}} gelöscht", - "rescanning_entity": "Erneutes Scannen von {count, plural, one {{singularEntity}} other {{pluralEntity}}}…", - "saved_entity": "{entity} gespeichert", - "started_auto_tagging": "Automatisches Tagging gestartet", - "started_generating": "Generierung gestartet", - "started_importing": "Import gestartet", - "updated_entity": "{entity} aktualisiert" - }, - "total": "Gesamt", - "true": "Wahr", - "twitter": "Twitter", - "type": "Typ", - "updated_at": "Aktualisiert am", - "url": "URL", - "validation": { - "aliases_must_be_unique": "Aliase müssen einzigartig sein", - "date_invalid_form": "${path} muss die Form YYYY-MM-DD haben", - "required": "${path} ist ein notwendiges Feld" - }, - "videos": "Videos", - "view_all": "Alle ansehen", - "weight": "Gewicht", - "weight_kg": "Gewicht (kg)", - "years_old": "Jahre alt", - "zip_file_count": "Anzahl der Zip-Dateien" + "videos": "Videos", + "view_all": "Alle ansehen", + "weight": "Gewicht", + "weight_kg": "Gewicht (kg)", + "years_old": "Jahre alt", + "zip_file_count": "Anzahl der Zip-Dateien" } diff --git a/ui/v2.5/src/locales/en-GB.json b/ui/v2.5/src/locales/en-GB.json index b0efd41702b..3747b969d92 100644 --- a/ui/v2.5/src/locales/en-GB.json +++ b/ui/v2.5/src/locales/en-GB.json @@ -156,6 +156,8 @@ "active_instance": "Active stash-box instance:", "blacklist_desc": "Blacklist items are excluded from queries. Note that they are regular expressions and also case-insensitive. 
Certain characters must be escaped with a backslash: {chars_require_escape}", "blacklist_label": "Blacklist", + "mark_organized_desc": "Immediately mark the scene as Organized after the Save button is clicked.", + "mark_organized_label": "Mark as Organized on save", "query_mode_auto": "Auto", "query_mode_auto_desc": "Uses metadata if present, or filename", "query_mode_dir": "Dir", @@ -688,6 +690,7 @@ "heading": "Continue playlist by default" }, "enable_chromecast": "Enable Chromecast", + "show_ab_loop_controls": "Show AB Loop plugin controls", "show_scrubber": "Show Scrubber", "track_activity": "Track Activity", "vr_tag": { @@ -1072,7 +1075,7 @@ "penis_length_cm": "Penis Length (cm)", "perceptual_similarity": "Perceptual Similarity (phash)", "performer": "Performer", - "performerTags": "Performer Tags", + "performer_tags": "Performer Tags", "performer_age": "Performer Age", "performer_count": "Performer Count", "performer_favorite": "Performer Favourited", @@ -1127,7 +1130,7 @@ "resume_time": "Resume Time", "scene": "Scene", "sceneTagger": "Scene Tagger", - "sceneTags": "Scene Tags", + "scene_tags": "Scene Tags", "scene_code": "Studio Code", "scene_count": "Scene Count", "scene_created_at": "Scene Created At", @@ -1352,4 +1355,4 @@ "weight_kg": "Weight (kg)", "years_old": "years old", "zip_file_count": "Zip File Count" -} \ No newline at end of file +} diff --git a/ui/v2.5/src/locales/en-US.json b/ui/v2.5/src/locales/en-US.json index 656efc6da24..994966d53bb 100644 --- a/ui/v2.5/src/locales/en-US.json +++ b/ui/v2.5/src/locales/en-US.json @@ -1,24 +1,23 @@ { - "actions": { - "customise": "Customize" - }, - "eye_color": "Eye Color", - "favourite": "Favorite", - "hair_color": "Hair Color", - "organized": "Organized", - "config": { - "tools": { - "scene_filename_parser": { - "ignore_organized": "Ignore organized scenes" - } + "actions": { + "customise": "Customize" }, - "ui": { - "custom_locales": { - "heading": "Custom localization", - "option_label": "Custom 
localization enabled" - } - } - }, - "optimise_database": "Optimize Database", - "performer_favorite": "Performer Favorited" + "eye_color": "Eye Color", + "favourite": "Favorite", + "hair_color": "Hair Color", + "organized": "Organized", + "config": { + "tools": { + "scene_filename_parser": { + "ignore_organized": "Ignore organized scenes" + } + }, + "ui": { + "custom_locales": { + "heading": "Custom localization", + "option_label": "Custom localization enabled" + } + } + }, + "performer_favorite": "Performer Favorited" } diff --git a/ui/v2.5/src/locales/es-ES.json b/ui/v2.5/src/locales/es-ES.json index 50f88ceeb34..f1de915a85b 100644 --- a/ui/v2.5/src/locales/es-ES.json +++ b/ui/v2.5/src/locales/es-ES.json @@ -929,7 +929,7 @@ "path": "Ruta", "perceptual_similarity": "Similaridad perceptiva (phash)", "performer": "Actriz/Actor", - "performerTags": "Etiquetas de actriz/actor", + "performer_tags": "Etiquetas de actriz/actor", "performer_age": "Edad de la actriz/actor", "performer_count": "Número de actrices/actores", "performer_favorite": "Actriz/actor favorita/o", @@ -977,7 +977,7 @@ "resolution": "Resolución", "scene": "Escena", "sceneTagger": "Etiquetador de escenas", - "sceneTags": "Etiquetas de escena", + "scene_tags": "Etiquetas de escena", "scene_count": "Número de escenas", "scene_id": "Indentificador de escena", "scenes": "Escenas", diff --git a/ui/v2.5/src/locales/et-EE.json b/ui/v2.5/src/locales/et-EE.json index 948d8df3fd0..b4794e1be0c 100644 --- a/ui/v2.5/src/locales/et-EE.json +++ b/ui/v2.5/src/locales/et-EE.json @@ -1,1246 +1,1245 @@ { - "actions": { - "add": "Lisa", - "add_directory": "Lisa Kaust", - "add_entity": "Lisa {entityType}", - "add_to_entity": "Lisa {entityType}-sse", - "allow": "Luba", - "allow_temporarily": "Luba ajutiselt", - "anonymise": "Anonüümseks Muutmine", - "apply": "Rakenda", - "auto_tag": "Märgi Automaatselt", - "backup": "Varunda", - "browse_for_image": "Otsi pilti…", - "cancel": "Tühista", - "clean": "Puhasta", - "clear": 
"Eemalda", - "clear_back_image": "Eemalda tagapilt", - "clear_front_image": "Eemalda esipilt", - "clear_image": "Eemalda Pilt", - "close": "Sulge", - "confirm": "Kinnita", - "continue": "Jätka", - "create": "Loo", - "create_chapters": "Loo Peatükk", - "create_entity": "Loo {entityType}", - "create_marker": "Loo Marker", - "created_entity": "Loodud {entity_type}: {entity_name}", - "customise": "Kohanda", - "delete": "Kustuta", - "delete_entity": "Kustuta {entityType}", - "delete_file": "Kustuta fail", - "delete_file_and_funscript": "Kustuta fail (ja funscript)", - "delete_generated_supporting_files": "Kustuta genereeritud toetusfailid", - "delete_stashid": "Kustuta StashID", - "disallow": "Keela", - "download": "Lae alla", - "download_anonymised": "Lae alla anonümiseeritult", - "download_backup": "Lae varundus alla", - "edit": "Muuda", - "edit_entity": "Muuda {entityType}", - "export": "Ekspordi", - "export_all": "Ekspordi kõik…", - "find": "Otsi", - "finish": "Lõpeta", - "from_file": "Failist…", - "from_url": "URL-ilt…", - "full_export": "Täielik Eksportimine", - "full_import": "Täielik Importimine", - "generate": "Genereeri", - "generate_thumb_default": "Genereri vaikepisipilt", - "generate_thumb_from_current": "Genereeri pisipilt praegusest", - "hash_migration": "hashi migratsioon", - "hide": "Peida", - "hide_configuration": "Peida Seadistus", - "identify": "Tuvasta", - "ignore": "Ignoreeri", - "import": "Impordi…", - "import_from_file": "Impordi failist", - "logout": "Logi välja", - "make_primary": "Määra Peamiseks", - "merge": "Liida", - "merge_from": "Liida teisest", - "merge_into": "Liida teise", - "migrate_blobs": "Migreeri Blobid", - "migrate_scene_screenshots": "Migreeri Stseenide Ekraanipildid", - "next_action": "Järgmine", - "not_running": "ei jookse", - "open_in_external_player": "Ava välises mängijas", - "open_random": "Ava Suvaline", - "overwrite": "Kirjuta üle", - "play_random": "Mängi Suvaline", - "play_selected": "Mängi valitud", - "preview": 
"Eelvaade", - "previous_action": "Tagasi", - "reassign": "Määra Ümber", - "refresh": "Värskenda", - "reload_plugins": "Lae pluginad uuesti", - "reload_scrapers": "Lae kraapijad uuesti", - "remove": "Eemalda", - "remove_from_gallery": "Eemalda Galeriist", - "rename_gen_files": "Nimeta genereeritud failid ümber", - "rescan": "Skaneeri uuesti", - "reshuffle": "Sega uuesti", - "running": "jookseb", - "save": "Salvesta", - "save_delete_settings": "Kasuta neid sätteid kustutamisel tavasätetena", - "save_filter": "Salvesta filter", - "scan": "Skaneeri", - "scrape": "Kraabi", - "scrape_query": "Kraapimispäring", - "scrape_scene_fragment": "Kraabi fragmentide kaupa", - "scrape_with": "Kraabi kasutades…", - "search": "Otsi", - "select_all": "Vali Kõik", - "select_entity": "Vali {entityType}", - "select_folders": "Vali kaustad", - "select_none": "Vali Mitte Midagi", - "selective_auto_tag": "Valikuline Automaatne Märkija", - "selective_clean": "Valikuline Puhastus", - "selective_scan": "Valikuline Skaneerimine", - "set_as_default": "Määra vaikeväärtuseks", - "set_back_image": "Tagapilt…", - "set_front_image": "Esipilt…", - "set_image": "Seadista pilt…", - "show": "Näita", - "show_configuration": "Näita Seadistust", - "skip": "Jäta vahele", - "split": "Jaga Kaheks", - "stop": "Stop", - "submit": "Esita", - "submit_stash_box": "Esita Stash-Kasti", - "submit_update": "Esita uuendus", - "swap": "Vaheta", - "tasks": { - "clean_confirm_message": "Kas oled kindel, et tahad Puhastada? See kustutab andmebaasi ja genereeritud sisu kõikide stseenide ja galeriide jaoks, mida enam failisüsteemis ei leidu.", - "dry_mode_selected": "Kuiv režiim valitud. Tegelikku kustutamist ei toimu, ainult logidesse kirjutamine.", - "import_warning": "Kas oled kindel, et tahad importida? See kustutab andmebaasi ja impordib ekporditud metaandmed uuesti." 
- }, - "temp_disable": "Keela ajutiselt…", - "temp_enable": "Luba ajutiselt…", - "unset": "Tühista", - "use_default": "Kasuta vaikeseadet", - "view_random": "Vaata Suvalist" - }, - "actions_name": "Tegevused", - "age": "Vanus", - "aliases": "Varjunimed", - "all": "kõik", - "also_known_as": "Tuntud ka kui", - "appears_with": "Esineb Koos", - "ascending": "Kasvav", - "average_resolution": "Keskmine Resolutsioon", - "between_and": "ja", - "birth_year": "Sünniaasta", - "birthdate": "Sünnikuupäev", - "bitrate": "Bitikiirus", - "blobs_storage_type": { - "database": "Andmebaas", - "filesystem": "Failisüsteem" - }, - "captions": "Subtiitrid", - "career_length": "Karjääri Pikkus", - "chapters": "Peatükid", - "component_tagger": { - "config": { - "active_instance": "Aktiivne stash-kasti eksemplar:", - "blacklist_desc": "Musta nimekirja üksused on päringutest välja jäetud. Pane tähele, et need on regulaaravaldised ja tõstutundetud. Teatud tähemärgid tuleb eemaldada kaldkriipsuga: {chars_require_escape}", - "blacklist_label": "Must nimekiri", - "query_mode_auto": "Auto", - "query_mode_auto_desc": "Kasutab metaandmeid, kui need olemas on, või failinime", - "query_mode_dir": "Kaust", - "query_mode_dir_desc": "Kasutab ainult videofaili kausta", - "query_mode_filename": "Failinimi", - "query_mode_filename_desc": "Kasutab ainult failinime", - "query_mode_label": "Päringurežiim", - "query_mode_metadata": "Metaandmed", - "query_mode_metadata_desc": "Kasutab ainult metaandmeid", - "query_mode_path": "Failitee", - "query_mode_path_desc": "Kasutab kogu failiteed", - "set_cover_desc": "Asenda stseeni kaanepilt, kui seda õnnestub leida.", - "set_cover_label": "Määra stseeni kaanepilt", - "set_tag_desc": "Ühenda stseenile külge silte, kas olemasolevate siltide ülekirjutamise või liitmise kaudu.", - "set_tag_label": "Määra sildid", - "show_male_desc": "Vali, kas meesnäitlejad on määramiseks saadaval.", - "show_male_label": "Näita meesnäitlejaid", - "source": "Allikas" - }, - "noun_query": 
"Päring", - "results": { - "duration_off": "Kestus on vähemalt {number}s vale", - "duration_unknown": "Kestus teadmata", - "fp_found": "{fpCount, plural, =0 {Uusi sõrmejälje kattuvusi ei leitud} other {# uut sõrmejälje kattuvust leitud}}", - "fp_matches": "Kestus klapib", - "fp_matches_multi": "Kestus klapib {matchCount}/{durationsLength} sõrmejälgedel", - "hash_matches": "{hash_type} klapib", - "match_failed_already_tagged": "Stseen juba sildistatud", - "match_failed_no_result": "Vasteid ei leitud", - "match_success": "Stseen edukalt sildistatud", - "phash_matches": "{count} PHashi kattuvust", - "unnamed": "Nimeta" - }, - "verb_match_fp": "Leia Sõrmejälje Kattuvusi", - "verb_matched": "Kokkusobitatud", - "verb_scrape_all": "Kraabi Kõikjalt", - "verb_submit_fp": "Esita {fpCount, plural, one{# Sõrmejälg} other{# Sõrmejälge}}", - "verb_toggle_config": "{toggle} {configuration}", - "verb_toggle_unmatched": "{toggle} kokkusobitamata stseenid" - }, - "config": { - "about": { - "build_hash": "Ehituse hash:", - "build_time": "Ehituse aeg:", - "check_for_new_version": "Kontrolli värskendusi", - "latest_version": "Uusim Versioon", - "latest_version_build_hash": "Uusima Versiooni Ehituse Hash:", - "new_version_notice": "[UUS]", - "release_date": "Väljalaskekuupäev:", - "stash_discord": "Liitu meie {url}i kanaliga", - "stash_home": "Stashi kodu {url}-is", - "stash_open_collective": "Toeta meid läbi {url}-i", - "stash_wiki": "Stashi {url} leht", - "version": "Versioon" - }, - "application_paths": { - "heading": "Rakenduse Failiteed" + "actions": { + "add": "Lisa", + "add_directory": "Lisa Kaust", + "add_entity": "Lisa {entityType}", + "add_to_entity": "Lisa {entityType}-sse", + "allow": "Luba", + "allow_temporarily": "Luba ajutiselt", + "anonymise": "Anonüümseks Muutmine", + "apply": "Rakenda", + "auto_tag": "Märgi Automaatselt", + "backup": "Varunda", + "browse_for_image": "Otsi pilti…", + "cancel": "Tühista", + "clean": "Puhasta", + "clear": "Eemalda", + "clear_back_image": 
"Eemalda tagapilt", + "clear_front_image": "Eemalda esipilt", + "clear_image": "Eemalda Pilt", + "close": "Sulge", + "confirm": "Kinnita", + "continue": "Jätka", + "create": "Loo", + "create_chapters": "Loo Peatükk", + "create_entity": "Loo {entityType}", + "create_marker": "Loo Marker", + "created_entity": "Loodud {entity_type}: {entity_name}", + "customise": "Kohanda", + "delete": "Kustuta", + "delete_entity": "Kustuta {entityType}", + "delete_file": "Kustuta fail", + "delete_file_and_funscript": "Kustuta fail (ja funscript)", + "delete_generated_supporting_files": "Kustuta genereeritud toetusfailid", + "delete_stashid": "Kustuta StashID", + "disallow": "Keela", + "download": "Lae alla", + "download_anonymised": "Lae alla anonümiseeritult", + "download_backup": "Lae varundus alla", + "edit": "Muuda", + "edit_entity": "Muuda {entityType}", + "export": "Ekspordi", + "export_all": "Ekspordi kõik…", + "find": "Otsi", + "finish": "Lõpeta", + "from_file": "Failist…", + "from_url": "URL-ilt…", + "full_export": "Täielik Eksportimine", + "full_import": "Täielik Importimine", + "generate": "Genereeri", + "generate_thumb_default": "Genereeri vaikepisipilt", + "generate_thumb_from_current": "Genereeri pisipilt praegusest", + "hash_migration": "hashi migratsioon", + "hide": "Peida", + "hide_configuration": "Peida Seadistus", + "identify": "Tuvasta", + "ignore": "Ignoreeri", + "import": "Impordi…", + "import_from_file": "Impordi failist", + "logout": "Logi välja", + "make_primary": "Määra Peamiseks", + "merge": "Liida", + "merge_from": "Liida teisest", + "merge_into": "Liida teise", + "migrate_blobs": "Migreeri Blobid", + "migrate_scene_screenshots": "Migreeri Stseenide Ekraanipildid", + "next_action": "Järgmine", + "not_running": "ei jookse", + "open_in_external_player": "Ava välises mängijas", + "open_random": "Ava Suvaline", + "overwrite": "Kirjuta üle", + "play_random": "Mängi Suvaline", + "play_selected": "Mängi valitud", + "preview": "Eelvaade", + "previous_action": 
"Tagasi", + "reassign": "Määra Ümber", + "refresh": "Värskenda", + "reload_plugins": "Lae pluginad uuesti", + "reload_scrapers": "Lae kraapijad uuesti", + "remove": "Eemalda", + "remove_from_gallery": "Eemalda Galeriist", + "rename_gen_files": "Nimeta genereeritud failid ümber", + "rescan": "Skaneeri uuesti", + "reshuffle": "Sega uuesti", + "running": "jookseb", + "save": "Salvesta", + "save_delete_settings": "Kasuta neid sätteid kustutamisel tavasätetena", + "save_filter": "Salvesta filter", + "scan": "Skaneeri", + "scrape": "Kraabi", + "scrape_query": "Kraapimispäring", + "scrape_scene_fragment": "Kraabi fragmentide kaupa", + "scrape_with": "Kraabi kasutades…", + "search": "Otsi", + "select_all": "Vali Kõik", + "select_entity": "Vali {entityType}", + "select_folders": "Vali kaustad", + "select_none": "Vali Mitte Midagi", + "selective_auto_tag": "Valikuline Automaatne Märkija", + "selective_clean": "Valikuline Puhastus", + "selective_scan": "Valikuline Skaneerimine", + "set_as_default": "Määra vaikeväärtuseks", + "set_back_image": "Tagapilt…", + "set_front_image": "Esipilt…", + "set_image": "Seadista pilt…", + "show": "Näita", + "show_configuration": "Näita Seadistust", + "skip": "Jäta vahele", + "split": "Jaga Kaheks", + "stop": "Stop", + "submit": "Esita", + "submit_stash_box": "Esita Stash-Kasti", + "submit_update": "Esita uuendus", + "swap": "Vaheta", + "tasks": { + "clean_confirm_message": "Kas oled kindel, et tahad Puhastada? See kustutab andmebaasi ja genereeritud sisu kõikide stseenide ja galeriide jaoks, mida enam failisüsteemis ei leidu.", + "dry_mode_selected": "Kuiv režiim valitud. Tegelikku kustutamist ei toimu, ainult logidesse kirjutamine.", + "import_warning": "Kas oled kindel, et tahad importida? See kustutab andmebaasi ja impordib eksporditud metaandmed uuesti." 
+ }, + "temp_disable": "Keela ajutiselt…", + "temp_enable": "Luba ajutiselt…", + "unset": "Tühista", + "use_default": "Kasuta vaikeseadet", + "view_random": "Vaata Suvalist" }, - "categories": { - "about": "Lisainfo", - "changelog": "Muudatuste nimekiri", - "interface": "Kasutajaliides", - "logs": "Logid", - "metadata_providers": "Metaandmete Pakkujad", - "plugins": "Pluginad", - "scraping": "Kraapimine", - "security": "Turvalisus", - "services": "Teenused", - "system": "Süsteem", - "tasks": "Ülesanded", - "tools": "Tööriistad" + "actions_name": "Tegevused", + "age": "Vanus", + "aliases": "Varjunimed", + "all": "kõik", + "also_known_as": "Tuntud ka kui", + "appears_with": "Esineb Koos", + "ascending": "Kasvav", + "average_resolution": "Keskmine Resolutsioon", + "between_and": "ja", + "birth_year": "Sünniaasta", + "birthdate": "Sünnikuupäev", + "bitrate": "Bitikiirus", + "blobs_storage_type": { + "database": "Andmebaas", + "filesystem": "Failisüsteem" }, - "dlna": { - "allow_temp_ip": "Luba {tempIP}", - "allowed_ip_addresses": "Lubatud IP aadressid", - "allowed_ip_temporarily": "Lubatud IP ajutiselt", - "default_ip_whitelist": "Vaikimisi IP Valge Nimekiri", - "default_ip_whitelist_desc": "Vaikimisi IP aadressid lubavad DLNA ligipääsu. Kasuta {wildcard}, et lubada kõiki IP aadresse.", - "disabled_dlna_temporarily": "DLNA ajutiselt keelatud", - "disallowed_ip": "Keelatud IP", - "enabled_by_default": "Vaikimisi lubatud", - "enabled_dlna_temporarily": "DLNA ajutiselt lubatud", - "network_interfaces": "Kasutajaliidesed", - "network_interfaces_desc": "Kasutajaliidesed DLNA serveri paljastamiseks. Tühi nimekiri lubab jooksutamist kõigil kasutajaliidestel. Vajalik DLNA taaskäivitus peale muutmist.", - "recent_ip_addresses": "Hiljutised IP aadressid", - "server_display_name": "Serveri Nimi", - "server_display_name_desc": "DLNA server nimi. 
Vaikimisi {server_name}, kui midagi pole sisestatud.", - "successfully_cancelled_temporary_behaviour": "Edukalt tühistatud ajutine käitumine", - "until_restart": "restardini", - "video_sort_order": "Videote Sorteerimise Vaikeväärtus", - "video_sort_order_desc": "Viis, kuidas vaikimisi videoid sorteerida." + "captions": "Subtiitrid", + "career_length": "Karjääri Pikkus", + "chapters": "Peatükid", + "component_tagger": { + "config": { + "active_instance": "Aktiivne stash-kasti eksemplar:", + "blacklist_desc": "Musta nimekirja üksused on päringutest välja jäetud. Pane tähele, et need on regulaaravaldised ja tõstutundetud. Teatud tähemärgid tuleb eemaldada kaldkriipsuga: {chars_require_escape}", + "blacklist_label": "Must nimekiri", + "query_mode_auto": "Auto", + "query_mode_auto_desc": "Kasutab metaandmeid, kui need olemas on, või failinime", + "query_mode_dir": "Kaust", + "query_mode_dir_desc": "Kasutab ainult videofaili kausta", + "query_mode_filename": "Failinimi", + "query_mode_filename_desc": "Kasutab ainult failinime", + "query_mode_label": "Päringurežiim", + "query_mode_metadata": "Metaandmed", + "query_mode_metadata_desc": "Kasutab ainult metaandmeid", + "query_mode_path": "Failitee", + "query_mode_path_desc": "Kasutab kogu failiteed", + "set_cover_desc": "Asenda stseeni kaanepilt, kui seda õnnestub leida.", + "set_cover_label": "Määra stseeni kaanepilt", + "set_tag_desc": "Ühenda stseenile külge silte, kas olemasolevate siltide ülekirjutamise või liitmise kaudu.", + "set_tag_label": "Määra sildid", + "show_male_desc": "Vali, kas meesnäitlejad on määramiseks saadaval.", + "show_male_label": "Näita meesnäitlejaid", + "source": "Allikas" + }, + "noun_query": "Päring", + "results": { + "duration_off": "Kestus on vähemalt {number}s vale", + "duration_unknown": "Kestus teadmata", + "fp_found": "{fpCount, plural, =0 {Uusi sõrmejälje kattuvusi ei leitud} other {# uut sõrmejälje kattuvust leitud}}", + "fp_matches": "Kestus klapib", + "fp_matches_multi": "Kestus klapib 
{matchCount}/{durationsLength} sõrmejälgedel", + "hash_matches": "{hash_type} klapib", + "match_failed_already_tagged": "Stseen juba sildistatud", + "match_failed_no_result": "Vasteid ei leitud", + "match_success": "Stseen edukalt sildistatud", + "phash_matches": "{count} PHashi kattuvust", + "unnamed": "Nimeta" + }, + "verb_match_fp": "Leia Sõrmejälje Kattuvusi", + "verb_matched": "Kokkusobitatud", + "verb_scrape_all": "Kraabi Kõikjalt", + "verb_submit_fp": "Esita {fpCount, plural, one{# Sõrmejälg} other{# Sõrmejälge}}", + "verb_toggle_config": "{toggle} {configuration}", + "verb_toggle_unmatched": "{toggle} kokkusobitamata stseenid" }, - "general": { - "auth": { - "api_key": "API Võti", - "api_key_desc": "API võti välistele süsteemidele. Nõutud ainult siis, kui kasutajanimi/parool on sätitud. Kasutajanimi peab olema salvestatud enne API võtme genereerimist.", - "authentication": "Autentimine", - "clear_api_key": "Puhasta API võti", - "credentials": { - "description": "Mandaat Stashile ligipääsu piiramiseks.", - "heading": "Mandaat" + "config": { + "about": { + "build_hash": "Ehituse hash:", + "build_time": "Ehituse aeg:", + "check_for_new_version": "Kontrolli värskendusi", + "latest_version": "Uusim Versioon", + "latest_version_build_hash": "Uusima Versiooni Ehituse Hash:", + "new_version_notice": "[UUS]", + "release_date": "Väljalaskekuupäev:", + "stash_discord": "Liitu meie {url}i kanaliga", + "stash_home": "Stashi kodu {url}-is", + "stash_open_collective": "Toeta meid läbi {url}-i", + "stash_wiki": "Stashi {url} leht", + "version": "Versioon" + }, + "application_paths": { + "heading": "Rakenduse Failiteed" + }, + "categories": { + "about": "Lisainfo", + "changelog": "Muudatuste nimekiri", + "interface": "Kasutajaliides", + "logs": "Logid", + "metadata_providers": "Metaandmete Pakkujad", + "plugins": "Pluginad", + "scraping": "Kraapimine", + "security": "Turvalisus", + "services": "Teenused", + "system": "Süsteem", + "tasks": "Ülesanded", + "tools": 
"Tööriistad" + }, + "dlna": { + "allow_temp_ip": "Luba {tempIP}", + "allowed_ip_addresses": "Lubatud IP aadressid", + "allowed_ip_temporarily": "Lubatud IP ajutiselt", + "default_ip_whitelist": "Vaikimisi IP Valge Nimekiri", + "default_ip_whitelist_desc": "Vaikimisi IP aadressid lubavad DLNA ligipääsu. Kasuta {wildcard}, et lubada kõiki IP aadresse.", + "disabled_dlna_temporarily": "DLNA ajutiselt keelatud", + "disallowed_ip": "Keelatud IP", + "enabled_by_default": "Vaikimisi lubatud", + "enabled_dlna_temporarily": "DLNA ajutiselt lubatud", + "network_interfaces": "Kasutajaliidesed", + "network_interfaces_desc": "Kasutajaliidesed DLNA serveri paljastamiseks. Tühi nimekiri lubab jooksutamist kõigil kasutajaliidestel. Vajalik DLNA taaskäivitus peale muutmist.", + "recent_ip_addresses": "Hiljutised IP aadressid", + "server_display_name": "Serveri Nimi", + "server_display_name_desc": "DLNA server nimi. Vaikimisi {server_name}, kui midagi pole sisestatud.", + "successfully_cancelled_temporary_behaviour": "Edukalt tühistatud ajutine käitumine", + "until_restart": "restardini", + "video_sort_order": "Videote Sorteerimise Vaikeväärtus", + "video_sort_order_desc": "Viis, kuidas vaikimisi videoid sorteerida." + }, + "general": { + "auth": { + "api_key": "API Võti", + "api_key_desc": "API võti välistele süsteemidele. Nõutud ainult siis, kui kasutajanimi/parool on sätitud. Kasutajanimi peab olema salvestatud enne API võtme genereerimist.", + "authentication": "Autentimine", + "clear_api_key": "Puhasta API võti", + "credentials": { + "description": "Mandaat Stashile ligipääsu piiramiseks.", + "heading": "Mandaat" + }, + "generate_api_key": "Genereeri API võti", + "log_file": "Logi fail", + "log_file_desc": "Failitee failini, kuhu logid sisestada. Jäta tühjaks, kui soovid logide salvestamise välja lülitada. Vajab taaskäivitust.", + "log_http": "Logi http ligipääs", + "log_http_desc": "Avaldab http ligipääsu logid terminali. 
Vajab taaskäivitust.", + "log_to_terminal": "Logi terminali", + "log_to_terminal_desc": "Avaldab logid lisaks failile ka terminalis. Alati sisselülitatud, kui logimine faili on keelatud. Vajab taaskäivitust.", + "maximum_session_age": "Maksimaalne Sessiooni Vanus", + "maximum_session_age_desc": "Maksimaalne paigalseisuaeg enne kui sessioon aegub, sekundites.", + "password": "Parool", + "password_desc": "Parool Stashi pääsemiseks. Jäta tühjaks, kui soovid sisselogimise keelata", + "stash-box_integration": "Stash-kasti integratsioon", + "username": "Kasutajanimi", + "username_desc": "Kasutajanimi Stashi pääsemiseks. Jäta tühjaks, kui soovid sisselogimise keelata" + }, + "backup_directory_path": { + "description": "Failitee SQLite andmebaasi varundusfailide jaoks", + "heading": "Varunduse Failitee" + }, + "blobs_path": { + "description": "Kus kohas hoida binaarseid andmeid failisüsteemis. Kehtib ainult kui kasutad Failisüsteem blob salvestustüüpi. HOIATUS: selle muutmine nõuab olemasolevate andmete manuaalset liigutamist.", + "heading": "Binaarsete andmete failisüsteemi tee" + }, + "blobs_storage": { + "description": "Kus hoida binaarseid andmeid nagu stseeni kaanepildid, näitlejate, stuudiote ja siltide pilte. Peale selle väärtuse muutmist tuleb olemasolevad andmed migreerida kasutades Migreeri Blobe ülesannet. Vaata Ülesannete lehele migreerimiseks.", + "heading": "Binaarsete andmete hoiustamistüüp" + }, + "cache_location": "Failitee vahemäluni. Nõutud kui striimimiseks kasutatakse HLSi (näiteks Apple seadetel) või DASHi.", + "cache_path_head": "Vahemälu Failitee", + "calculate_md5_and_ohash_desc": "Kalkuleeri MD5 checksum lisaks oshashile. Lubamine põhjustab aeglasemat esmast skaneerimist. 
Faili nimetuse hash peab olema sätitud oshashiks, et keelata MD5 kalkuleerimine.", + "calculate_md5_and_ohash_label": "Kalkuleeri MD5 videote jaoks", + "check_for_insecure_certificates": "Otsi ebaturvalisi sertifikaate", + "check_for_insecure_certificates_desc": "Mõned lehed kasutavad ebaturvalisi ssl sertifikaate. Kui märkimata, kraapija jätab sertifikaadi kontrollimise vahele ning võimaldab nendelt lehtedelt andmeid kraapida. Kui kraapimise ajal esineb sertifikaadivigu, eemalda linnuke.", + "chrome_cdp_path": "Chrome CDP tee", + "chrome_cdp_path_desc": "Failitee Chrome käivitajani, või kaugaadress (algab http:// või https:// -iga, näiteks http://localhost:9222/json/version) Chrome'i eksemplarini.", + "create_galleries_from_folders_desc": "Kui lubatud, loob vaikeväärtusena galeriisid pilte sisaldavatest kaustadest. Loo kasutas fail nimega .forcegallery või .nogallery, et seda sundida või sellest hoiduda.", + "create_galleries_from_folders_label": "Loo galeriisid kaustadest, mis sisaldavad pilte", + "database": "Andmebaas", + "db_path_head": "Andmebaasi Failitee", + "directory_locations_to_your_content": "Failitee asukohad sisule", + "excluded_image_gallery_patterns_desc": "Pildi- ja galeriifailide/teede regexpid, mida skannimisest välja jätta ja Clean'i lisada", + "excluded_image_gallery_patterns_head": "Välistatud Pildi/Galerii Mustrid", + "excluded_video_patterns_desc": "Videofailide/teede regexpid, mida skannimisest välja jätta ja Clean'i lisada", + "excluded_video_patterns_head": "Välistatud Video Mustrid", + "ffmpeg": { + "hardware_acceleration": { + "desc": "Kasutab olemasolevat riistvara reaalajas video transkodeerimiseks.", + "heading": "FFmpeg riistvara enkodeerimine" + }, + "live_transcode": { + "input_args": { + "desc": "Edasijõudnutele: Lisaargumendid mida edastada ffmpegi sisendväljale live video transkodeerimise ajal.", + "heading": "FFmpeg Live Transkodeerimise Sisendargumendid" + }, + "output_args": { + "desc": "Edasijõudnutele: Lisaargumendid mida 
edastada ffmpegi väljundväljale live video transkodeerimise ajal.", + "heading": "FFmpeg Live Transkodeerimise Väljundargumendid" + } + }, + "transcode": { + "input_args": { + "desc": "Edasijõudnutele: Lisaargumendid mida edastada ffmpegi sisendväljale video genereerimisel.", + "heading": "FFmpeg Transkodeerimise Sisendargumendid" + }, + "output_args": { + "desc": "Edasijõudnutele: Lisaargumendid mida edastada ffmpegi väljundväljale video genereerimisel.", + "heading": "FFmpeg Transkodeerimise Väljundargumendid" + } + } + }, + "funscript_heatmap_draw_range": "Kaasa vahemik genereeritud kuumkaartidel", + "funscript_heatmap_draw_range_desc": "Joonista liikumisvahemik genereeritud kuumkaardi y-teljel. Olemasolevad kuumkaardid tuleb peale muutmist uuesti genereerida.", + "gallery_cover_regex_desc": "Regexp kasutakse, et tuvastada pilti kui galerii kaanepildina", + "gallery_cover_regex_label": "Galerii kaanepildi muster", + "gallery_ext_desc": "Komadega eraldatud faililaiendite loend, mis tuvastatakse galerii ZIP-failidena.", + "gallery_ext_head": "Galerii zip Laiendused", + "generated_file_naming_hash_desc": "Kasutage failide nimetamiseks MD5 või oshashi. Selle muutmiseks on vaja, et kõikides stseenides oleks kohaldatav MD5/oshash väärtus täidetud. Pärast selle väärtuse muutmist tuleb olemasolevad loodud failid migreerida või uuesti genereerida. 
Vaadake üleviimise kohta lehekülge Ülesanded.", + "generated_file_naming_hash_head": "Genereeritud faili nimetamise hash", + "generated_files_location": "Loodud failide (stseenimarkerid, stseeni eelvaated, spraidid jne) asukoht Failiteel", + "generated_path_head": "Genereeritud Failitee", + "hashing": "Hashimine", + "heatmap_generation": "Funscripti Kuumkaardi Genereerimine", + "image_ext_desc": "Komadega eraldatud faililaiendite loend, mis tuvastatakse piltidena.", + "image_ext_head": "Pildilaiendused", + "include_audio_desc": "Kaasa eelvaadete loomisel helivoog.", + "include_audio_head": "Kaasa heli", + "logging": "Logimine", + "maximum_streaming_transcode_size_desc": "Transkodeeritud voogude maksimaalne suurus", + "maximum_streaming_transcode_size_head": "Maksimaalne voogesituse ümberkodeerimise suurus", + "maximum_transcode_size_desc": "Loodud ümberkoodimiste maksimaalne suurus", + "maximum_transcode_size_head": "Maksimaalne ümberkodeerimise suurus", + "metadata_path": { + "description": "Kataloogi asukoht, mida kasutatakse täieliku ekspordi või impordi teostamisel", + "heading": "Metaandmete Failitee" + }, + "number_of_parallel_task_for_scan_generation_desc": "Automaatse tuvastamise jaoks määra 0. Hoiatus, kui tehakse rohkem toiminguid, kui on vaja 100% protsessori kasutuse saavutamiseks, väheneb jõudlus ja võib esineda muid probleeme.", + "number_of_parallel_task_for_scan_generation_head": "Paralleelsete skaneerimise/genereerimise ülesannete arv", + "parallel_scan_head": "Paralleelne Skaneerimine/Generatsioon", + "preview_generation": "Eelvaate Genereerimine", + "python_path": { + "description": "Pythoni käivitataja asukoht. Kasutatakse skriptipõhiste kraapijate ja pluginate jaoks. 
Kui see on tühi, lahendatakse python keskkonnast", + "heading": "Pythoni Failitee" + }, + "scraper_user_agent": "Kraapija Kasutajaagent", + "scraper_user_agent_desc": "Kasutajaagendi string, mida kasutatakse kraapimise HTTP-päringute käigus", + "scrapers_path": { + "description": "Failitee kraapijate sättefailide jaoks", + "heading": "Kraapijate Failitee" + }, + "scraping": "Kraapimine", + "sqlite_location": "Failitee asukoht SQLite andmebaasi jaoks (vajab taaskäivitust). HOIATUS: andmebaasi kasutamine, mis ei jookse samal süsteemil, kui Stash (nt üle võrgu) ei ole toetatud!", + "video_ext_desc": "Komadega eraldatud faililaiendite loend, mis tuvastatakse videotena.", + "video_ext_head": "Videolaiendused", + "video_head": "Video" + }, + "library": { + "exclusions": "Välistused", + "gallery_and_image_options": "Galerii ja Piltide sätted", + "media_content_extensions": "Media sisu laiendused" + }, + "logs": { + "log_level": "Logimise tase" + }, + "plugins": { + "hooks": "Hookid", + "triggers_on": "Sisselülitatud päästikud" + }, + "scraping": { + "entity_metadata": "{entityType} Metaandmed", + "entity_scrapers": "{entityType} kraapijad", + "excluded_tag_patterns_desc": "Sildinimede regexpid, mida kraapise tulemustest välja jätta", + "excluded_tag_patterns_head": "Välistatud Siltide Mustrid", + "scraper": "Kraapija", + "scrapers": "Kraapijad", + "search_by_name": "Otsi nime järgi", + "supported_types": "Toetatud tüübid", + "supported_urls": "URL-id" + }, + "stashbox": { + "add_instance": "Lisa stash-kasti eksemplar", + "api_key": "API võti", + "description": "Stash-box hõlbustab stseenide ja esinejate automaatset märgistamist sõrmejälgede ja failinimede põhjal.\nLõpp-punkti ja API võtme leiad oma konto lehelt stash-kasti eksemplaris. 
Nimed on nõutavad, kui lisatakse rohkem kui üks eksemplar.", + "endpoint": "Lõpp-punkt", + "graphql_endpoint": "GraphQL lõpp-punkt", + "name": "Nimi", + "title": "Stash-kasti Lõpp-punktid" }, - "generate_api_key": "Genereeri API võti", - "log_file": "Logi fail", - "log_file_desc": "Failitee failini, kuhu logid sisestada. Jäta tühjaks, kui soovid logide salvestamise välja lülitada. Vajab taaskäivitust.", - "log_http": "Logi http ligipääs", - "log_http_desc": "Avaldab http ligipääsu logid terminali. Vajab taaskäivitust.", - "log_to_terminal": "Logi terminali", - "log_to_terminal_desc": "Avaldab logid lisaks failile ka terminalis. Alati sisselülitatud, kui logimine faili on keelatud. Vajab taaskäivitust.", - "maximum_session_age": "Maksimaalne Sessiooni Vanus", - "maximum_session_age_desc": "Maksimaalne paigalseisuaeg enne kui sessioon aegub, sekundites.", - "password": "Parool", - "password_desc": "Parool Stashi pääsemiseks. Jäta tühjaks, kui soovid sisselogimise keelata", - "stash-box_integration": "Stash-kasti integratsioon", - "username": "Kasutajanimi", - "username_desc": "Kasutajanimi Stashi pääsemiseks. Jäta tühjaks, kui soovid sisselogimise keelata" - }, - "backup_directory_path": { - "description": "Failitee SQLite andmebaasi varundusfailide jaoks", - "heading": "Varunduse Failitee" - }, - "blobs_path": { - "description": "Kus kohas hoida binaarseid andmeid failisüsteemis. Kehtib ainult kui kasutad Failisüsteem blob salvestustüüpi. HOIATUS: selle muutmine nõuab olemasolevate andmete manuaalset liigutamist.", - "heading": "Binaarseete andmete failisüsteemi tee" - }, - "blobs_storage": { - "description": "Kus hoida binaarseid andmeid nagu stseeni kaanepildid, näitlejate, stuudiote ja siltide pilte. Peale selle väärtuse muutmist tuleb olemasolevad andmed migreerida kasutades Migreeri Blobe ülesannet. Vaata Ülesannete lehele migreerimiseks.", - "heading": "Binaarsete andmete hoiustamistüüp" - }, - "cache_location": "Failitee vahemäluni. 
Nõutud kui striimimiseks kasutatakse HLSi (näiteks Apple seadetel) või DASHi.", - "cache_path_head": "Vahemälu Failitee", - "calculate_md5_and_ohash_desc": "Kalkuleeri MD5 checksum lisaks oshashile. Lubamine põhjustab aeglasemat esmast skaneerimist. Faili nimetuse hash peab olema sätitud oshashiks, et keelata MD5 kalkuleerimine.", - "calculate_md5_and_ohash_label": "Kalkuleeri MD5 videote jaoks", - "check_for_insecure_certificates": "Otsi ebaturvalisi sertifikaate", - "check_for_insecure_certificates_desc": "Mõned lehed kasutavad ebaturvalisi ssl sertifikaate. Kui märkimata, kraapija jätab sertifikaadi kontrollimise vahele ning võimaldab nendelt lehtedelt andmeid kraapida. Kui kraapimise ajal esineb sertifikaadivigu, eemalda linnuke.", - "chrome_cdp_path": "Chrome CDP tee", - "chrome_cdp_path_desc": "Failitee Chrome käivitajani, või kaugaadress (algab http:// või https:// -iga, näiteks http://localhost:9222/json/version) Chrome'i eksemplarini.", - "create_galleries_from_folders_desc": "Kui lubatud, loob vaikeväärtusena galeriisid pilte sisaldavatest kaustadest. 
Loo kasutas fail nimega .forcegallery või .nogallery, et seda sundida või sellest hoiduda.", - "create_galleries_from_folders_label": "Loo galeriisid kaustadest, mis sisaldavad pilte", - "database": "Andmebaas", - "db_path_head": "Andmebaasi Failitee", - "directory_locations_to_your_content": "Failitee asukohad sisule", - "excluded_image_gallery_patterns_desc": "Pildi- ja galeriifailide/teede regexpid, mida skannimisest välja jätta ja Clean'i lisada", - "excluded_image_gallery_patterns_head": "Välistatud Pildi/Galerii Mustrid", - "excluded_video_patterns_desc": "Videofailide/teede regexpid, mida skannimisest välja jätta ja Clean'i lisada", - "excluded_video_patterns_head": "Välistatud Video Mustrid", - "ffmpeg": { - "hardware_acceleration": { - "desc": "Kasutab olemasolevat riistvara reaalajas video transkodeerimiseks.", - "heading": "FFmpeg riistvara enkodeerimine" + "system": { + "transcoding": "Ümbertöötlemine" }, - "live_transcode": { - "input_args": { - "desc": "Edasijõudnutele: Lisaargumendid mida edastada ffmpegi sisendväljale live video transkodeerimise ajal.", - "heading": "FFmpeg Live Transkodeerimise Sisendargumendid" - }, - "output_args": { - "desc": "Edasijõudnutele: Lisaargumendid mida edastada ffmpegi väljundväljale live video transkodeerimise ajal.", - "heading": "FFmpeg Live Transkodeerimise Väljundargumendid" - } + "tasks": { + "added_job_to_queue": "{operation_name} lisatud tööde järjekorda", + "anonymise_and_download": "Loob anonüümse koopia andmebaasist ja laeb väljundfaili alla.", + "anonymise_database": "Loob andmebaasist tagavarakoopia tagavara koopiate kausta, muudab tundliku teabe anonüümseks. Seda saab jagada teistega abistamise ja probleemide analüüsimise eesmärgil. Originaalset andmebaasi ei muudeta. 
Anonüümne andmebaas kasutab failinime formaati {filename_format}.", + "anonymising_database": "Muudan andmebaasi anonüümseks", + "auto_tag": { + "auto_tagging_all_paths": "Automaatne Märkimine kõikidel failiteedel", + "auto_tagging_paths": "Automaatne Märkimine järgnevatel failiteedel" + }, + "auto_tag_based_on_filenames": "Automaatselt märgi sisu vastavalt failinimedele.", + "auto_tagging": "Automaatne Märkimine", + "backing_up_database": "Andmebaasi varundamine", + "backup_and_download": "Teeb andmebaasist varukoopia ja laadib saadud faili alla.", + "backup_database": "Varundab andmebaasi varundamise failiteele, failinimega vormingus {filename_format}", + "cleanup_desc": "Kontrolli puuduvaid faile ja eemalda need andmebaasist. See on hävitav tegevus.", + "data_management": "Andmehaldus", + "defaults_set": "Vaikesätted on määratud ja neid kasutatakse, kui klõpsate lehel Ülesanded nupul {action}.", + "dont_include_file_extension_as_part_of_the_title": "Ärge lisage pealkirja osana faililaiendit", + "empty_queue": "Praegu ei tööta ühtegi ülesannet.", + "export_to_json": "Ekspordib andmebaasi sisu metaandmete failiteele JSON-vormingus.", + "generate": { + "generating_from_paths": "Genereeri stseenide jaoks järgnevatelt failiteedelt", + "generating_scenes": "Genereerimine {num} {scene} jaoks" + }, + "generate_desc": "Genereeri toetavad pildi-, sprite-, video-, vtt- ja muud failid.", + "generate_phashes_during_scan": "Genereeri nähtavaid hashe", + "generate_phashes_during_scan_tooltip": "Duplikaatide eemaldamiseks ja stseenide tuvastamiseks.", + "generate_previews_during_scan": "Genereeri animeeritud eelvaateid", + "generate_previews_during_scan_tooltip": "Genereeri animeeritud WebP eelvaateid, nõutav ainult siis, kui eelvaate tüüp on seatud väärtusele Animeeritud Pilt.", + "generate_sprites_during_scan": "Genereeri puhastusspriite", + "generate_thumbnails_during_scan": "Genereeri piltide jaoks pisipilte", + "generate_video_covers_during_scan": "Genereeri stseeni 
kaanepildid", + "generate_video_previews_during_scan": "Genereeri eelvaateid", + "generate_video_previews_during_scan_tooltip": "Genereeri video eelvaateid, mis esitatakse kursorit stseeni kohal hoides", + "generated_content": "Genereeritud Sisu", + "identify": { + "and_create_missing": "ja loo puuduv", + "create_missing": "Loo puuduv", + "default_options": "Vaikesätted", + "description": "Stseeni metaandmete automaatne määramine stash-kasti ja kraapija allikate abil.", + "explicit_set_description": "Kui allikaspetsiifilistes suvandites neid ei alistata, kasutatakse järgmisi valikuid.", + "field": "Väli", + "field_behaviour": "{strategy} {field}", + "field_options": "Välja Valikud", + "heading": "Tuvastamine", + "identifying_from_paths": "Stseenide tuvastamine järgmistelt failiteedelt", + "identifying_scenes": "Tuvastan {num} {scene}", + "include_male_performers": "Kaasa meesnäitlejaid", + "set_cover_images": "Määra kaanepildid", + "set_organized": "Määra organiseeritud silt", + "source": "Allikas", + "source_options": "{source} Sätted", + "sources": "Allikad", + "strategy": "Strateegia" + }, + "import_from_exported_json": "Impordi eksporditud JSON-ist metaandmete kataloogist. Kustutab olemasoleva andmebaasi.", + "incremental_import": "Järkjärguline import esitatud eksporditud ZIP-failist.", + "job_queue": "Ülesannete Järjekord", + "maintenance": "Hooldus", + "migrate_blobs": { + "delete_old": "Kustuta vanad andmed", + "description": "Migreeri blobid praegusele blobi andmesüsteemile. Seda migratsiooni tuleb jooksutada peale blobi andmesalvestussüsteemi muutmist. Saab valikuliselt kustutada vanad andmed peale migratsiooni." + }, + "migrate_hash_files": "Kasutatakse pärast genereeritud faili nimetamise hashi muutmist olemasolevate loodud failide ümbernimetamiseks uuele hashivormingule.", + "migrate_scene_screenshots": { + "delete_files": "Kustuta ekraanipiltide failid", + "description": "Migreeri stseeni ekraanipildid uute blobi andmesüsteemi. 
Seda migratsiooni peaks jooksutama peale olemasoleva süsteemi migreerimist 0.20le. Saab valikuliselt kustutada vanu ekraanipilte peale migratsiooni.", + "overwrite_existing": "Kirjuta üle eksisteerivad blobid koos ekraanipiltide andmetega" + }, + "migrations": "Migreerimised", + "only_dry_run": "Tee ainult kuivjooks. Ära eemalda midagi", + "plugin_tasks": "Plugina Ülesanded", + "scan": { + "scanning_all_paths": "Kõikide failiteede skaneerimine", + "scanning_paths": "Järgnevate failiteede skaneerimine" + }, + "scan_for_content_desc": "Skaneeri uue sisu leidmiseks ja andmebaasi lisamiseks.", + "set_name_date_details_from_metadata_if_present": "Määra nimi, kuupäev, detailid failisisestest metaandmetest" }, - "transcode": { - "input_args": { - "desc": "Edasijõudnutele: Lisaargumendid mida edastada ffmpegi sisendväljale video genereerimisel.", - "heading": "FFmpeg Transkodeerimise Sisendargumendid" - }, - "output_args": { - "desc": "Edasijõudnutele: Lisaargumendid mida edastada ffmpegi väljundväljale video genereerimisel.", - "heading": "FFmpeg Transkodeerimise Väljundargumendid" - } + "tools": { + "scene_duplicate_checker": "Duplikaatstseenide Kontroll", + "scene_filename_parser": { + "add_field": "Lisa Väli", + "capitalize_title": "Pealkirja kirjutamine suurtähtedega", + "display_fields": "Kuva väljad", + "escape_chars": "Literaalsete märkide vältimiseks kasutage \\", + "filename": "Failinimi", + "filename_pattern": "Failinime Muster", + "ignore_organized": "Ignoreeri organiseeritud stseene", + "ignored_words": "Ignoreeritud sõnad", + "matches_with": "Kattub {i}-ga", + "select_parser_recipe": "Valige Parser-i Retsept", + "title": "Stseeni Failinimede Parser", + "whitespace_chars": "Tühikumärgid", + "whitespace_chars_desc": "Need märgid asendatakse pealkirjas tühikutega" + }, + "scene_tools": "Stseeni Tööriistad" + }, + "ui": { + "abbreviate_counters": { + "description": "Lühenda loendureid kaartidel ja üksikasjade vaatamise lehtedel, näiteks \"1831\" vormindatakse 
kujule \"1,8K\".", + "heading": "Loendurite lühendamine" + }, + "basic_settings": "Põhisätted", + "custom_css": { + "description": "Muudatuste jõustumiseks tuleb leht uuesti laadida. Pole garantiid, et kohandatud CSS töötab ka tuleviku Stashi uuendustes.", + "heading": "Kohandatud CSS", + "option_label": "Kohandatud CSS lubatud" + }, + "custom_javascript": { + "description": "Pead muudatuste nägemiseks lehe uuesti laadima. Pole garantiid, et kohandatud Javascript töötab ka tuleviku Stashi uuendustes.", + "heading": "Kohandatud Javascript", + "option_label": "Kohandatud Javascript lubatud" + }, + "custom_locales": { + "description": "Muutke üksikuid keele stringe. Põhiloendi leiate aadressilt https://github.com/stashapp/stash/blob/develop/ui/v2.5/src/locales/en-GB.json. Muudatuste jõustumiseks tuleb leht uuesti laadida.", + "heading": "Kohandatud tõlge", + "option_label": "Kohandatud tõlge lubatud" + }, + "delete_options": { + "description": "Vaikesätted piltide, galeriide ja stseenide kustutamisel.", + "heading": "Kustutamissätted", + "options": { + "delete_file": "Kustuta fail vaikesättena", + "delete_generated_supporting_files": "Kustuta genereeritud toetusfailid vaikesättena" + } + }, + "desktop_integration": { + "desktop_integration": "Töölaua Integratsioon", + "notifications_enabled": "Luba Teavitused", + "send_desktop_notifications_for_events": "Saada töölaua teated sündmuste korral", + "skip_opening_browser": "Jäta Brauseri Avamine Vahele", + "skip_opening_browser_on_startup": "Jäta käivitamise ajal brauseri automaatne avamine vahele" + }, + "editing": { + "disable_dropdown_create": { + "description": "Eemaldage rippmenüü valijatest võimalus luua uusi objekte", + "heading": "Keela rippmenüü loomine" + }, + "heading": "Redigeerimine", + "max_options_shown": { + "label": "Maksimaalne number esemeid mida näidata valikmenüüdes" + }, + "rating_system": { + "star_precision": { + "label": "Tähtedega Hindamise Täpsus", + "options": { + "full": "Täis", + "half": 
"Pool", + "quarter": "Neljandik", + "tenth": "Kümme" + } + }, + "type": { + "label": "Hindamissüsteemi Tüüp", + "options": { + "decimal": "Komakohaga", + "stars": "Tähed" + } + } + } + }, + "funscript_offset": { + "description": "Interaktiivsete skriptide taasesituse aja nihe millisekundites.", + "heading": "Funscripti nihe (ms)" + }, + "handy_connection": { + "connect": "Ühenda", + "server_offset": { + "heading": "Serveri Nihe" + }, + "status": { + "heading": "Handy Ühenduse Staatus" + }, + "sync": "Sünkroniseeri" + }, + "handy_connection_key": { + "description": "Handy ühendusvõti interaktiivsete stseenide jaoks. Selle määramine võimaldab Stashil jagada teie praeguse stseeni teavet saidiga handyfeeling.com", + "heading": "Handy Ühendusvõti" + }, + "image_lightbox": { + "heading": "Pildi Valguskast" + }, + "image_wall": { + "direction": "Direktsioon", + "heading": "Pildisein", + "margin": "Marginaalid (pikslid)" + }, + "images": { + "heading": "Pildid", + "options": { + "write_image_thumbnails": { + "description": "Kirjuta piltide pisipildid kettale, kui need luuakse käigupealt", + "heading": "Kirjutage piltide pisipildid" + } + } + }, + "interactive_options": "Interaktiivsed Valikud", + "language": { + "heading": "Keel" + }, + "max_loop_duration": { + "description": "Stseeni maksimaalne kestus, mille jooksul stseenimängija videot uuesti mängib – sisesta 0 keelamiseks", + "heading": "Silmuse maksimaalne kestus" + }, + "menu_items": { + "description": "Saad navigeerimisribal kuvada või peita erinevat tüüpi sisu", + "heading": "Menüüelemendid" + }, + "minimum_play_percent": { + "description": "Aja protsent, kui kaua stseeni tuleb esitada, enne kui selle esitamiste arvu suurendatakse.", + "heading": "Minimaalne Esitusprotsent" + }, + "performers": { + "options": { + "image_location": { + "description": "Esitaja vaikekujutiste kohandatud failitee. 
Sisseehitatud vaikeseadete kasutamiseks jätke tühjaks", + "heading": "Kohandatud Esinejate Pilditee" + } + } + }, + "preview_type": { + "description": "Seinaelementide konfiguratsioon", + "heading": "Eelvaate Tüüp", + "options": { + "animated": "Animeeritud Pilt", + "static": "Staatiline Pilt", + "video": "Video" + } + }, + "scene_list": { + "heading": "Stseenide Nimekiri", + "options": { + "show_studio_as_text": "Näita Stuudioid tekstina" + } + }, + "scene_player": { + "heading": "Stseenimängija", + "options": { + "always_start_from_beginning": "Alusta videot alati algusest", + "auto_start_video": "Video automaatne alustamine", + "auto_start_video_on_play_selected": { + "description": "Stseenivideote automaatne esitamine järjekorrast esitamisel või stseenide lehelt valitud või juhusliku esitamise korral", + "heading": "Video automaatne esitamine valitud esitamisel" + }, + "continue_playlist_default": { + "description": "Kui video lõppeb, esitage järjekorras järgmine stseen", + "heading": "Esitusloendi jätkamine vaikimisi" + }, + "show_scrubber": "Näita Detailide Otsijat", + "track_activity": "Jälgi Tegevust" + } + }, + "scene_wall": { + "heading": "Stseenide/Markerite Sein", + "options": { + "display_title": "Kuva pealkiri ja sildid", + "toggle_sound": "Luba heli" + } + }, + "scroll_attempts_before_change": { + "description": "Kerimise katsete arv enne järgmise/eelmise üksuse juurde liikumist. 
Kehtib ainult Pan Y kerimisrežiimi puhul.", + "heading": "Kerimiskatsed enne üleminekut" + }, + "show_tag_card_on_hover": { + "description": "Kuva märgendi kaarti, kui hõljute sildi märkidel", + "heading": "Sildikaardi tööriistanäpunäited" + }, + "slideshow_delay": { + "description": "Slaidiseanss on galeriides seinavaaterežiimi korral saadaval", + "heading": "Slaidiseansi Viivitus (sekundites)" + }, + "studio_panel": { + "heading": "Studiovaade", + "options": { + "show_child_studio_content": { + "description": "Stuudiovaates kuvage ka alamstuudiote sisu", + "heading": "Kuvage alamstuudiote sisu" + } + } + }, + "tag_panel": { + "heading": "Sildivaade", + "options": { + "show_child_tagged_content": { + "description": "Sildivaates kuvage ka alammärgendite sisu", + "heading": "Kuva alamsildi sisu" + } + } + }, + "title": "Kasutajaliides" } - }, - "funscript_heatmap_draw_range": "Kaasa vahemik genereeritud kuumkaartidel", - "funscript_heatmap_draw_range_desc": "Joonista liikumisvahemik genereeritud kuumkaardi y-teljel. Olemasolevad kuumkaardid tuleb peale muutmist uuesti genereerida.", - "gallery_cover_regex_desc": "Regexp kasutakse, et tuvastada pilti kui galerii kaanepildina", - "gallery_cover_regex_label": "Galerii kaanepildi muster", - "gallery_ext_desc": "Komadega eraldatud faililaiendite loend, mis tuvastatakse galerii ZIP-failidena.", - "gallery_ext_head": "Galerii zip Laiendused", - "generated_file_naming_hash_desc": "Kasutage failide nimetamiseks MD5 või oshashi. Selle muutmiseks on vaja, et kõikides stseenides oleks kohaldatav MD5/oshash väärtus täidetud. Pärast selle väärtuse muutmist tuleb olemasolevad loodud failid migreerida või uuesti genereerida. 
Vaadake üleviimise kohta lehekülge Ülesanded.", - "generated_file_naming_hash_head": "Genereeritud faili nimetamise hash", - "generated_files_location": "Loodud failide (stseenimarkerid, stseeni eelvaated, spraidid jne) asukoht Failiteel", - "generated_path_head": "Genereeritud Failitee", - "hashing": "Hashimine", - "heatmap_generation": "Funscripti Kuumkaardi Genereerimine", - "image_ext_desc": "Komadega eraldatud faililaiendite loend, mis tuvastatakse piltidena.", - "image_ext_head": "Pildilaiendused", - "include_audio_desc": "Kaasa eelvaadete loomisel helivoog.", - "include_audio_head": "Kaasa heli", - "logging": "Logimine", - "maximum_streaming_transcode_size_desc": "Transkodeeritud voogude maksimaalne suurus", - "maximum_streaming_transcode_size_head": "Maksimaalne voogesituse ümberkodeerimise suurus", - "maximum_transcode_size_desc": "Loodud ümberkoodimiste maksimaalne suurus", - "maximum_transcode_size_head": "Maksimaalne ümberkodeerimise suurus", - "metadata_path": { - "description": "Kataloogi asukoht, mida kasutatakse täieliku ekspordi või impordi teostamisel", - "heading": "Metaandmete Failitee" - }, - "number_of_parallel_task_for_scan_generation_desc": "Automaatse tuvastamise jaoks määra 0. Hoiatus, kui tehakse rohkem toiminguid, kui on vaja 100% protsessori kasutuse saavutamiseks, väheneb jõudlus ja võib esineda muid probleeme.", - "number_of_parallel_task_for_scan_generation_head": "Paralleelsete skaneerimise/genereerimise ülesannete arv", - "parallel_scan_head": "Paralleelne Skaneerimine/Generatsioon", - "preview_generation": "Eelvaate Genereerimine", - "python_path": { - "description": "Pythoni käivitataja asukoht. Kasutatakse skriptipõhiste kraapijate ja pluginate jaoks. 
Kui see on tühi, lahendatakse python keskkonnast", - "heading": "Pythoni Failitee" - }, - "scraper_user_agent": "Kraapija Kasutajaagent", - "scraper_user_agent_desc": "Kasutajaagendi string, mida kasutatakse kraapimise HTTP-päringute käigus", - "scrapers_path": { - "description": "Failitee kraapijate sättefailide jaoks", - "heading": "Kraapijate Failitee" - }, - "scraping": "Kraapimine", - "sqlite_location": "Failitee asukoht SQLite andmebaasi jaoks (vajab taaskäivitust). HOIATUS: andmebaasi kasutamine, mis ei jookse samal süsteemil, kui Stash (nt üle võrgu) ei ole toetatud!", - "video_ext_desc": "Komadega eraldatud faililaiendite loend, mis tuvastatakse videotena.", - "video_ext_head": "Videolaiendused", - "video_head": "Video" - }, - "library": { - "exclusions": "Välistused", - "gallery_and_image_options": "Galerii ja Piltide sätted", - "media_content_extensions": "Media sisu laiendused" - }, - "logs": { - "log_level": "Logimise tase" }, - "plugins": { - "hooks": "Hookid", - "triggers_on": "Sisselülitatud päästikud" + "configuration": "Konfiguratsioon", + "countables": { + "files": "{count, plural, one {Fail} other {Faili}}", + "galleries": "{count, plural, one {Galerii} other {Galleriid}}", + "images": "{count, plural, one {Pilt} other {Pilti}}", + "markers": "{count, plural, one {Marker} other {Markerit}}", + "movies": "{count, plural, one {Film} other {Filmi}}", + "performers": "{count, plural, one {Näitleja} other {Näitlejat}}", + "scenes": "{count, plural, one {Stseen} other {Stseeni}}", + "studios": "{count, plural, one {Stuudio} other {Stuudiot}}", + "tags": "{count, plural, one {Silt} other {Silti}}" }, - "scraping": { - "entity_metadata": "{entityType} Metaandmed", - "entity_scrapers": "{entityType} kraapijad", - "excluded_tag_patterns_desc": "Sildinimede regexpid, mida kraapise tulemustest välja jätta", - "excluded_tag_patterns_head": "Välistatud Siltide Mustrid", - "scraper": "Kraapija", - "scrapers": "Kraapijad", - "search_by_name": "Otsi nime järgi", 
- "supported_types": "Toetatud tüübid", - "supported_urls": "URL-id" - }, - "stashbox": { - "add_instance": "Lisa stash-kasti eksemplar", - "api_key": "API võti", - "description": "Stash-box hõlbustab stseenide ja esinejate automaatset märgistamist sõrmejälgede ja failinimede põhjal.\nLõpp-punkti ja API võtme leiad oma konto lehelt stash-kasti eksemplaris. Nimed on nõutavad, kui lisatakse rohkem kui üks eksemplar.", - "endpoint": "Lõpp-punkt", - "graphql_endpoint": "GraphQL lõpp-punkt", - "name": "Nimi", - "title": "Stash-kasti Lõpp-punktid" + "country": "Riik", + "cover_image": "Kaanepilt", + "created_at": "Loodud", + "criterion": { + "greater_than": "Suurem kui", + "less_than": "Väiksem kui", + "value": "Väärtus" }, - "system": { - "transcoding": "Ümbertöötlemine" + "criterion_modifier": { + "between": "vahel", + "equals": "on", + "excludes": "välistab", + "format_string": "{criterion} {modifierString} {valueString}", + "greater_than": "on suurem kui", + "includes": "sisaldab", + "includes_all": "sisaldab kõiki", + "is_null": "on null", + "less_than": "on vähem kui", + "matches_regex": "kattub regexiga", + "not_between": "ei ole vahemikus", + "not_equals": "ei ole", + "not_matches_regex": "ei kattu regexiga", + "not_null": "ei ole null" }, - "tasks": { - "added_job_to_queue": "{operation_name} lisatud tööde järjekorda", - "anonymise_and_download": "Loob anonüümse koopia andmebaasist ja laeb väljundfaili alla.", - "anonymise_database": "Loob andmebaasist tagavarakoopia tagavara koopiate kausta, muudab tundliku teabe anonüümseks. Seda saab jagada teistega abistamise ja probleemide analüüsimise eesmärgil. Originaalset andmebaasi ei muudeta. 
Anonüümne andmebaas kasutab failinime formaati {filename_format}.", - "anonymising_database": "Muudan andmebaasi anonüümseks", - "auto_tag": { - "auto_tagging_all_paths": "Automaatne Märkimine kõikidel failiteedel", - "auto_tagging_paths": "Automaatne Märkimine järgnevatel failiteedel" - }, - "auto_tag_based_on_filenames": "Automaatselt märgi sisu vastavalt failinimedele.", - "auto_tagging": "Automaatne Märkimine", - "backing_up_database": "Andmebaasi varundamine", - "backup_and_download": "Teeb andmebaasist varukoopia ja laadib saadud faili alla.", - "backup_database": "Varundab andmebaasi varundamise failiteele, failinimega vormingus {filename_format}", - "cleanup_desc": "Kontrolli puuduvaid faile ja eemalda need andmebaasist. See on hävitav tegevus.", - "data_management": "Andmehaldus", - "defaults_set": "Vaikesätted on määratud ja neid kasutatakse, kui klõpsate lehel Ülesanded nupul {action}.", - "dont_include_file_extension_as_part_of_the_title": "Ärge lisage pealkirja osana faililaiendit", - "empty_queue": "Praegu ei tööta ühtegi ülesannet.", - "export_to_json": "Ekspordib andmebaasi sisu metaandmete failiteele JSON-vormingus.", - "generate": { - "generating_from_paths": "Genereeri stseenide jaoks järgnevatelt failiteedelt", - "generating_scenes": "Genereerimine {num} {scene} jaoks" - }, - "generate_desc": "Genereeri toetavad pildi-, sprite-, video-, vtt- ja muud failid.", - "generate_phashes_during_scan": "Genereeri nähtavaid hashe", - "generate_phashes_during_scan_tooltip": "Duplikaatide eemaldamiseks ja stseenide tuvastamiseks.", - "generate_previews_during_scan": "Genereeri animeeritud eelvaateid", - "generate_previews_during_scan_tooltip": "Genereeri animeeritud WebP eelvaateid, nõutav ainult siis, kui eelvaate tüüp on seatud väärtusele Animeeritud Pilt.", - "generate_sprites_during_scan": "Genereeri puhastusspriite", - "generate_thumbnails_during_scan": "Genereeri piltide jaoks pisipilte", - "generate_video_covers_during_scan": "Genereeri stseeni 
kaanepidid", - "generate_video_previews_during_scan": "Genereeri eelvaateid", - "generate_video_previews_during_scan_tooltip": "Genereeri video eelvaateid, mis esitatakse kursorit stseeni kohal hoides", - "generated_content": "Genereeritud Sisu", - "identify": { - "and_create_missing": "ja loo puuduv", - "create_missing": "Loo puuduv", - "default_options": "Vaikesätted", - "description": "Stseeni metaandmete automaatne määramine stash-kasti ja kraapija allikate abil.", - "explicit_set_description": "Kui allikaspetsiifilistes suvandites neid ei alistata, kasutatakse järgmisi valikuid.", - "field": "Väli", - "field_behaviour": "{strategy} {field}", - "field_options": "Välja Valikud", - "heading": "Tuvastamine", - "identifying_from_paths": "Stseenide tuvastamine järgmistelt failiteedelt", - "identifying_scenes": "Tuvastan {num} {scene}", - "include_male_performers": "Kaasa meesnäitlejaid", - "set_cover_images": "Määra kaanepildid", - "set_organized": "Määra organiseeritud silt", - "source": "Allikas", - "source_options": "{source} Sätted", - "sources": "Allikad", - "strategy": "Strateegia" - }, - "import_from_exported_json": "Impordi eksporditud JSON-ist metaandmete kataloogist. Kustutab olemasoleva andmebaasi.", - "incremental_import": "Järkjärguline import esitatud eksporditud ZIP-failist.", - "job_queue": "Ülesannete Järjekord", - "maintenance": "Hooldus", - "migrate_blobs": { - "delete_old": "Kustuta vanad andmed", - "description": "Migreeri blobid praegusele blobi andmesüsteemile. Seeda migratsiooni tuleb jooksutada peale blobi andmesalvestussüsteemi muutmist. Saab valikuliselt kustutada vanad andmed peale migratsiooni." - }, - "migrate_hash_files": "Kasutatakse pärast genereeritud faili nimetamise hashi muutmist olemasolevate loodud failide ümbernimetamiseks uuele hashivormingule.", - "migrate_scene_screenshots": { - "delete_files": "Kustuta ekraanipiltide failid", - "description": "Migreeri stseeni ekranipildid uute blobi andmesüsteemi. 
Seda migratsuiooni peaks jooksutama peale olemasoleva süsteemi migreerimist 0.20le. Saab valikuselt kustutada vanu ekraanipilte peale migratsiooni.", - "overwrite_existing": "Kirjuta üle eksisteerivad blobid koos ekraanipiltide andmetega" - }, - "migrations": "Migreerimised", - "only_dry_run": "Tee ainult kuivjooks. Ära eemalda midagi", - "plugin_tasks": "Plugina Ülesanded", - "scan": { - "scanning_all_paths": "Kõikide failiteede skaneerimine", - "scanning_paths": "Järgnevate failiteede skaneerimine" - }, - "scan_for_content_desc": "Skaneeri uue sisu leidmiseks ja andmebaasi lisamiseks.", - "set_name_date_details_from_metadata_if_present": "Määra nimi, kuupäev, detailid failisisestest metaandmetest" - }, - "tools": { - "scene_duplicate_checker": "Duplikaatstseenide Kontroll", - "scene_filename_parser": { - "add_field": "Lisa Väli", - "capitalize_title": "Pealkirja kirjutamine suurtähtedega", - "display_fields": "Kuva väljad", - "escape_chars": "Literaalsete märkide vältimiseks kasutage \\", - "filename": "Failinimi", - "filename_pattern": "Failinime Muster", - "ignore_organized": "Ignoreeri organiseeritud stseene", - "ignored_words": "Ignoreeritud sõnad", - "matches_with": "Kattub {i}-ga", - "select_parser_recipe": "Valige Parser-i Retsept", - "title": "Stseeni Failinimede Parser", - "whitespace_chars": "Tühikumärgid", - "whitespace_chars_desc": "Need märgid asendatakse pealkirjas tühikutega" - }, - "scene_tools": "Stseeni Tööriistad" - }, - "ui": { - "abbreviate_counters": { - "description": "Lühenda loendureid kaartidel ja üksikasjade vaatamise lehtedel, näiteks \"1831\" vormindatakse kujule \"1,8K\".", - "heading": "Loendurite lühendamine" - }, - "basic_settings": "Põhisätted", - "custom_css": { - "description": "Muudatuste jõustumiseks tuleb leht uuesti laadida. 
Pole garantiid, et kohandatud CSS töötab ka tuleviku Stashi uuendustes.", - "heading": "Kohandatud CSS", - "option_label": "Kohandatud CSS lubatud" - }, - "custom_javascript": { - "description": "Pead muudatuste nägemiseks lehe uuesti laadima. Pole garantiid, et kohandatud Javascript töötab ka tuleviku Stashi uuendustes.", - "heading": "Kohandatud Javascript", - "option_label": "Kohandatud Javascript lubatud" - }, - "custom_locales": { - "description": "Muutke üksikuid keele stringe. Põhiloendi leiate aadressilt https://github.com/stashapp/stash/blob/develop/ui/v2.5/src/locales/en-GB.json. Muudatuste jõustumiseks tuleb leht uuesti laadida.", - "heading": "Kohandatud tõlge", - "option_label": "Kohandatud tõlge lubatud" - }, - "delete_options": { - "description": "Vaikesätted piltide, galeriide ja stseenide kustutamisel.", - "heading": "Kustutamissätted", - "options": { - "delete_file": "Kustuta fail vaikesättena", - "delete_generated_supporting_files": "Kustuta genereeritud toetusfailid vaikesättena" - } - }, - "desktop_integration": { - "desktop_integration": "Töölaua Integratsioon", - "notifications_enabled": "Luba Teavitused", - "send_desktop_notifications_for_events": "Saada töölaua teated sündmuste korral", - "skip_opening_browser": "Jäta Brauseri Avamine Vahele", - "skip_opening_browser_on_startup": "Jäta käivitamise ajal brauseri automaatne avamine vahele" - }, - "editing": { - "disable_dropdown_create": { - "description": "Eemaldage rippmenüü valijatest võimalus luua uusi objekte", - "heading": "Keela rippmenüü loomine" + "custom": "Kohandatud", + "date": "Kuupäev", + "date_format": "AAAA-KK-PP", + "datetime_format": "AAAA-KK-PP TT:MM", + "death_date": "Surmakuupäev", + "death_year": "Surma-aasta", + "descending": "Langev", + "description": "Kirjeldus", + "detail": "Detail", + "details": "Detailid", + "developmentVersion": "Arendusversioon", + "dialogs": { + "create_new_entity": "Loo uus {entity}", + "delete_alert": "Järgnev {count, plural, one 
{{singularEntity}} other {{pluralEntity}}} kustutatakse lõplikult:", + "delete_confirm": "Kas oled kindel, et soovid kustutada {entityName}?", + "delete_entity_desc": "{count, plural, one {Kas oled kindel, et soovid kustutada {singularEntity}? Kui sa faili ei kustutata, siis {singularEntity} lisatakse skaneerimise käigus uuesti.} other {Kas oled kindel, et soovid kustutada {pluralEntity}? Kui sa faile ei kustutata, siis {pluralEntity} lisatakse skaneerimise käigus uuesti.}}", + "delete_entity_simple_desc": "{count, plural, one {Kas oled kindel, et soovid kustutada {singularEntity}?} other {Kas oled kindel, et soovid kustutada {pluralEntity}?}}", + "delete_entity_title": "{count, plural, one {Kustuta {singularEntity}} other {Kustuta {pluralEntity}}}", + "delete_galleries_extra": "...lisaks kõik pildifailid, mida pole lisatud ühelegi teisele galeriile.", + "delete_gallery_files": "Kustutage galerii kaust/zip-fail ja kõik pildid, mis pole ühelegi teise galeriisse lisatud.", + "delete_object_desc": "Kas oled kindel, et soovid kustutada {count, plural, one {seda {singularEntity}} other {neid {pluralEntity}}}?", + "delete_object_overflow": "…ja {count} teist {count, plural, one {{singularEntity}} other {{pluralEntity}}}.", + "delete_object_title": "Kustuta {count, plural, one {{singularEntity}} other {{pluralEntity}}}", + "dont_show_until_updated": "Ära näita kuni järgmise värskenduseni", + "edit_entity_title": "Redigeeri {count, plural, one {{singularEntity}} other {{pluralEntity}}}", + "export_include_related_objects": "Kaasa seotud objektid eksporti", + "export_title": "Ekspordi", + "imagewall": { + "direction": { + "column": "Kolumn", + "description": "Kolumni või ridade põhine välimus.", + "row": "Rida" + }, + "margin_desc": "Marginaali pikslite arv ümber iga täispildi." 
}, - "heading": "Redigeerimine", - "max_options_shown": { - "label": "Maksimaalne number esemeid mida näidata valikmenüüdes" - }, - "rating_system": { - "star_precision": { - "label": "Tähtedega Hindamise Täpsus", - "options": { - "full": "Täis", - "half": "Pool", - "quarter": "Neljandik", - "tenth": "Kümme" - } - }, - "type": { - "label": "Hindamissüsteemi Tüüp", - "options": { - "decimal": "Komakohaga", - "stars": "Tähed" + "lightbox": { + "delay": "Viivitus (s)", + "display_mode": { + "fit_horizontally": "Mahuta horisontaalselt", + "fit_to_screen": "Mahuta ekraanile", + "label": "Kuvamisrežiim", + "original": "Originaal" + }, + "options": "Sätted", + "page_header": "Leht {page} / {total}", + "reset_zoom_on_nav": "Pildi muutumisel lähtesta suumi tase", + "scale_up": { + "description": "Suurendage väiksemaid pilte ekraani täitmiseks", + "label": "Suurenda ekraanile mahtumiseks" + }, + "scroll_mode": { + "description": "Teise režiimi ajutiselt kasutamiseks hoidke all shift klahvi.", + "label": "Kerimisrežiim", + "pan_y": "Liiguta Y", + "zoom": "Suum" } - } - } - }, - "funscript_offset": { - "description": "Interaktiivsete skriptide taasesituse aja nihe millisekundites.", - "heading": "Funscripti nihe (ms)" - }, - "handy_connection": { - "connect": "Ühenda", - "server_offset": { - "heading": "Serveri Nihe" }, - "status": { - "heading": "Handy Ühenduse Staatus" + "merge": { + "destination": "Sihtkoht", + "empty_results": "Sihtkoha välja väärtusi ei muudeta.", + "source": "Allikas" }, - "sync": "Sünkroniseeri" - }, - "handy_connection_key": { - "description": "Handy ühendusvõti interaktiivsete stseenide jaoks. 
Selle määramine võimaldab Stashil jagada teie praeguse stseeni teavet saidiga handyfeeling.com", - "heading": "Handy Ühendusvõti" - }, - "image_lightbox": { - "heading": "Pildi Valguskast" - }, - "image_wall": { - "direction": "Direktsioon", - "heading": "Pildisein", - "margin": "Marginaalid (pikslid)" - }, - "images": { - "heading": "Pildid", - "options": { - "write_image_thumbnails": { - "description": "Kirjuta piltide pisipildid kettale, kui need luuakse käigupealt", - "heading": "Kirjutage piltide pisipildid" - } - } - }, - "interactive_options": "Interaktiivsed Valikud", - "language": { - "heading": "Keel" - }, - "max_loop_duration": { - "description": "Stseeni maksimaalne kestus, mille jooksul stseenimängija videot uuesti mängib – sisesta 0 keelamiseks", - "heading": "Silmuse maksimaalne kestus" - }, - "menu_items": { - "description": "Saad navigeerimisribal kuvada või peita erinevat tüüpi sisu", - "heading": "Menüüelemendid" - }, - "minimum_play_percent": { - "description": "Aja protsent, kui kaua stseeni tuleb esitada, enne kui selle esitamiste arvu suurendatakse.", - "heading": "Minimaalne Esitusprotsent" - }, - "performers": { - "options": { - "image_location": { - "description": "Esitaja vaikekujutiste kohandatud failitee. 
Sisseehitatud vaikeseadete kasutamiseks jätke tühjaks", - "heading": "Kohandatud Esinejate Pilditee" - } - } - }, - "preview_type": { - "description": "Seinaelementide konfiguratsioon", - "heading": "Eelvaate Tüüp", - "options": { - "animated": "Animeeritud Pilt", - "static": "Staatiline Pilt", - "video": "Video" - } - }, - "scene_list": { - "heading": "Stseenide Nimekiri", - "options": { - "show_studio_as_text": "Näita Stuudioid tekstina" - } - }, - "scene_player": { - "heading": "Stseenimängija", - "options": { - "always_start_from_beginning": "Alusta videot alati algusest", - "auto_start_video": "Video automaatne alustamine", - "auto_start_video_on_play_selected": { - "description": "Stseenivideote automaatne esitamine järjekorrast esitamisel või stseenide lehelt valitud või juhusliku esitamise korral", - "heading": "Video automaatne esitamine valitud esitamisel" - }, - "continue_playlist_default": { - "description": "Kui video lõppeb, esitage järjekorras järgmine stseen", - "heading": "Esitusloendi jätkamine vaikimisi" - }, - "show_scrubber": "Näita Detailide Otsijat", - "track_activity": "Jälgi Tegevust" - } - }, - "scene_wall": { - "heading": "Stseenide/Markerite Sein", - "options": { - "display_title": "Kuva pealkiri ja sildid", - "toggle_sound": "Luba heli" - } - }, - "scroll_attempts_before_change": { - "description": "Kerimise katsete arv enne järgmise/eelmise üksuse juurde liikumist. 
Kehtib ainult Pan Y kerimisrežiimi puhul.", - "heading": "Kerimiskatsed enne üleminekut" - }, - "show_tag_card_on_hover": { - "description": "Kuva märgendi kaarti, kui hõljute sildi märkidel", - "heading": "Sildikaardi tööriistanäpunäited" - }, - "slideshow_delay": { - "description": "Slaidiseanss on galeriides seinavaaterežiimi korral saadaval", - "heading": "Slaidiseansi Viivitus (sekundites)" - }, - "studio_panel": { - "heading": "Studiovaade", - "options": { - "show_child_studio_content": { - "description": "Stuudiovaates kuvage ka alamstuudiote sisu", - "heading": "Kuvage alamstuudiote sisu" - } - } - }, - "tag_panel": { - "heading": "Sildivaade", - "options": { - "show_child_tagged_content": { - "description": "Sildivaates kuvage ka alammärgendite sisu", - "heading": "Kuva alamsildi sisu" - } - } - }, - "title": "Kasutajaliides" - } - }, - "configuration": "Konfiguratsioon", - "countables": { - "files": "{count, plural, one {Fail} other {Faili}}", - "galleries": "{count, plural, one {Galerii} other {Galleriid}}", - "images": "{count, plural, one {Pilt} other {Pilti}}", - "markers": "{count, plural, one {Marker} other {Markerit}}", - "movies": "{count, plural, one {Film} other {Filmi}}", - "performers": "{count, plural, one {Näitleja} other {Näitlejat}}", - "scenes": "{count, plural, one {Stseen} other {Stseeni}}", - "studios": "{count, plural, one {Stuudio} other {Stuudiot}}", - "tags": "{count, plural, one {Silt} other {Silti}}" - }, - "country": "Riik", - "cover_image": "Kaanepilt", - "created_at": "Loodud", - "criterion": { - "greater_than": "Suurem kui", - "less_than": "Väiksem kui", - "value": "Väärtus" - }, - "criterion_modifier": { - "between": "vahel", - "equals": "on", - "excludes": "välistab", - "format_string": "{criterion} {modifierString} {valueString}", - "greater_than": "on suurem kui", - "includes": "sisaldab", - "includes_all": "sisaldab kõiki", - "is_null": "on null", - "less_than": "on vähem kui", - "matches_regex": "katub regexiga", - 
"not_between": "ei ole vahemikus", - "not_equals": "ei ole", - "not_matches_regex": "ei kattu regexiga", - "not_null": "ei ole null" - }, - "custom": "Kohandatud", - "date": "Kuupäev", - "date_format": "AAAA-KK-PP", - "datetime_format": "AAAA-KK-PP TT:MM", - "death_date": "Surmakuupäev", - "death_year": "Surma-aasta", - "descending": "Langev", - "description": "Kirjeldus", - "detail": "Detail", - "details": "Detailid", - "developmentVersion": "Arendusversioon", - "dialogs": { - "create_new_entity": "Loo uus {entity}", - "delete_alert": "Järgnev {count, plural, one {{singularEntity}} other {{pluralEntity}}} kustutatakse lõplikult:", - "delete_confirm": "Kas oled kindel, et soovid kustutada {entityName}?", - "delete_entity_desc": "{count, plural, one {Kas oled kindel, et soovid kustutada {singularEntity}? Kui sa faili ei kustutata, siis {singularEntity} lisatakse skaneerimise käigus uuesti.} other {Kas oled kindel, et soovid kustutada {pluralEntity}? Kui sa faile ei kustutata, siis {pluralEntity} lisatakse skaneerimise käigus uuesti.}}", - "delete_entity_simple_desc": "{count, plural, one {Kas oled kindel, et soovid kustutada {singularEntity}?} other {Kas oled kindel, et soovid kustutada {pluralEntity}?}}", - "delete_entity_title": "{count, plural, one {Kustuta {singularEntity}} other {Kustuta {pluralEntity}}}", - "delete_galleries_extra": "...lisaks kõik pildifailid, mida pole lisatud ühelegi teisele galeriile.", - "delete_gallery_files": "Kustutage galerii kaust/zip-fail ja kõik pildid, mis pole ühelegi teise galeriisse lisatud.", - "delete_object_desc": "Kas oled kindel, et soovid kustutada {count, plural, one {seda {singularEntity}} other {neid {pluralEntity}}}?", - "delete_object_overflow": "…ja {count} teist {count, plural, one {{singularEntity}} other {{pluralEntity}}}.", - "delete_object_title": "Kustuta {count, plural, one {{singularEntity}} other {{pluralEntity}}}", - "dont_show_until_updated": "Ära näita kuni järgmise värskenduseni", - "edit_entity_title": 
"Redigeeri {count, plural, one {{singularEntity}} other {{pluralEntity}}}", - "export_include_related_objects": "Kaasa seotud objektid eksporti", - "export_title": "Ekspordi", - "imagewall": { - "direction": { - "column": "Kolumn", - "description": "Kolumni või ridade põhine välimus.", - "row": "Rida" - }, - "margin_desc": "Marginaali pikslite arv ümber iga täispildi." + "merge_tags": { + "destination": "Sihtkoht", + "source": "Allikas" + }, + "overwrite_filter_confirm": "Oled kindel, et tahad üle kirjutada juba eksisteerivat päringut {entityName}?", + "reassign_entity_title": "{count, plural, one {Määra Ümber {singularEntity}} other {Määra Ümber {pluralEntity}-d/id}}", + "reassign_files": { + "destination": "Määra Ümber" + }, + "scene_gen": { + "covers": "Stseeni kaaned", + "force_transcodes": "Sunni Ümbertöötlemise genereerimine", + "force_transcodes_tooltip": "Vaikimisi genereeritakse ümbertöötlemisi ainult siis, kui brauser videofaili ei toeta. Kui see on lubatud, genereeritakse ümbertöötlemisi isegi siis, kui videofail näib olevat brauseris toetatud.", + "image_previews": "Animeeritud Piltide Eelvaated", + "image_previews_tooltip": "Animeeritud WebP eelvaated, nõutavad ainult siis, kui Eelvaate Tüüp on seatud väärtusele Animeeritud Pilt.", + "interactive_heatmap_speed": "Looge interaktiivsete stseenide jaoks soojuskaarte ja kiirusi", + "marker_image_previews": "Markeri Animeeritud Piltide Eelvaated", + "marker_image_previews_tooltip": "Animeeritud markeri WebP eelvaated, nõutavad ainult siis, kui Eelvaate Tüüp on seatud väärtusele Animeeritud Pilt.", + "marker_screenshots": "Markeri Ekraanipildid", + "marker_screenshots_tooltip": "Marker staatilised JPG-kujutised, nõutav ainult siis, kui Eelvaate Tüüp on seatud väärtusele Staatiline Pilt.", + "markers": "Markeri Eelvaated", + "markers_tooltip": "20-sekundilised videod, mis algavad etteantud ajakoodiga.", + "override_preview_generation_options": "Eelvaate Genereerimise Valikute Ülekirjutamine", + 
"override_preview_generation_options_desc": "Eelvaate Genereerimise Sätete üle kirjutamine selle operatsiooni jaoks. Vaikeseaded määratakse jaotises Süsteem -> Eelvaate Genereerimine.", + "overwrite": "Kirjuta üle olemasolevad failid", + "phash": "Nähtavad hashid (duplikaatide eemaldamiseks)", + "preview_exclude_end_time_desc": "Välista stseeni eelvaadetest viimased x sekundid. See võib olla väärtus sekundites või protsent (nt 2%) stseeni kogukestusest.", + "preview_exclude_end_time_head": "Välista lõpuaeg", + "preview_exclude_start_time_desc": "Välista esimesed x sekundid stseeni eelvaadetest. See võib olla väärtus sekundites või protsent (nt 2%) stseeni kogukestusest.", + "preview_exclude_start_time_head": "Välista algusaeg", + "preview_generation_options": "Eelvaate Genereerimise Sätted", + "preview_options": "Eelvaate Sätted", + "preview_preset_desc": "Eelseadistus reguleerib eelvaate genereerimise suurust, kvaliteeti ja kodeerimisaega. Eelseadistused peale „aeglase” on väheneva tootlikkusega ja neid ei soovitata.", + "preview_preset_head": "Eelvaate kodeeringu eelseadistus", + "preview_seg_count_desc": "Eelvaatefailide segmentide arv.", + "preview_seg_count_head": "Eelvaates olevate segmentide arv", + "preview_seg_duration_desc": "Iga eelvaate lõigu kestus sekundites.", + "preview_seg_duration_head": "Eelvaate segmendi kestus", + "sprites": "Stseenipuhastuse Spraidid", + "sprites_tooltip": "Spraidid (stseenipuhasti jaoks)", + "transcodes": "Ümbertöötlemine", + "transcodes_tooltip": "Toetamata videovormingute MP4-konversioonid", + "video_previews": "Eelvaated", + "video_previews_tooltip": "Video eelvaated, mis esitatakse kursorit stseeni kohal hoides" + }, + "scenes_found": "Leiti {count} stseeni", + "scrape_entity_query": "{entity_type} Kraapija Päring", + "scrape_entity_title": "{entity_type} Kraapija Tulemused", + "scrape_results_existing": "Eksisteeriv", + "scrape_results_scraped": "Kraabitud", + "set_image_url_title": "Pildi URL", + "unsaved_changes": 
"Salvestamata muudatused. Kas soovid kindlasti lahkuda?" }, - "lightbox": { - "delay": "Viivitus (s)", - "display_mode": { - "fit_horizontally": "Mahuta horisontaalselt", - "fit_to_screen": "Mahuta ekraanile", - "label": "Kuvamisrežiim", - "original": "Originaal" - }, - "options": "Sätted", - "page_header": "Leht {page} / {total}", - "reset_zoom_on_nav": "Pildi muutumisel lähtesta suumi tase", - "scale_up": { - "description": "Suurendage väiksemaid pilte ekraani täitmiseks", - "label": "Suurenda ekraanile mahtumiseks" - }, - "scroll_mode": { - "description": "Teise režiimi ajutiselt kasutamiseks hoidke all shift klahvi.", - "label": "Kerimisrežiim", - "pan_y": "Liiguta Y", - "zoom": "Suum" - } + "dimensions": "Dimensioonid", + "director": "Režissöör", + "disambiguation": "Ühesõnastamine", + "display_mode": { + "grid": "Võrgustik", + "list": "Nimekiri", + "tagger": "Sildistaja", + "unknown": "Teadmata", + "wall": "Sein" }, - "merge": { - "destination": "Siihtkoht", - "empty_results": "Sihtkoha välja väärtusi ei muudeta.", - "source": "Allikas" + "donate": "Anneta", + "dupe_check": { + "description": "Täpsetest madalamate tasemete arvutamine võib võtta kauem aega. 
Valepositiivsed tulemused võidakse tagastada ka madalamal täpsustasemel.", + "duration_diff": "Maksimaalse Pikkuse Vahe", + "duration_options": { + "any": "Kõik", + "equal": "Võrdne" + }, + "found_sets": "{setCount, plural, one{# duplikaat leitud.} other {# duplikaati leitud.}}", + "options": { + "exact": "Täpselt", + "high": "Kõrge", + "low": "Madal", + "medium": "Keskmine" + }, + "search_accuracy_label": "Otsingu Täpsus", + "title": "Duplikaatstseenid" }, - "merge_tags": { - "destination": "Sihtkoht", - "source": "Allikas" + "duplicated_phash": "Duplikeeritud (phash)", + "duration": "Pikkus", + "effect_filters": { + "aspect": "Aspekt", + "blue": "Sinine", + "blur": "Hägusta", + "brightness": "Eredus", + "contrast": "Kontrast", + "gamma": "Gamma", + "green": "Roheline", + "hue": "Värvitoon", + "name": "Filtrid", + "name_transforms": "Muutused", + "red": "Punane", + "reset_filters": "Lähtesta Filtrid", + "reset_transforms": "Lähesta Muutused", + "rotate": "Pööra", + "rotate_left_and_scale": "Pööra Vasakule & Skaleeri", + "rotate_right_and_scale": "Pööra Paremale & Skaleeri", + "saturation": "Saturatsioon", + "scale": "Suurus", + "warmth": "Soojus" }, - "overwrite_filter_confirm": "Oled kindel, et tahad üle kirjutada juba eksisteerivat päringut {entityName}?", - "reassign_entity_title": "{count, plural, one {Määra Ümber {singularEntity}} other {Määra Ümber {pluralEntity}-d/id}}", - "reassign_files": { - "destination": "Määra Ümber" + "empty_server": "Sellel lehel soovituste nägemiseks lisage oma serverisse mõned stseenid.", + "errors": { + "image_index_greater_than_zero": "Pildi indeks peab olema suurem kui 0", + "lazy_component_error_help": "Kui uuendasid Stashi hiljuti, palun taaslae leht või tühjenda oma brauseri cache.", + "something_went_wrong": "Midagi läks valesti." 
}, - "scene_gen": { - "covers": "Stseeni kaaned", - "force_transcodes": "Sunni Ümbertöötlemise genereerimine", - "force_transcodes_tooltip": "Vaikimisi genereeritakse ümbertöötlemisi ainult siis, kui brauser videofaili ei toeta. Kui see on lubatud, genereeritakse ümbertöötlemisi isegi siis, kui videofail näib olevat brauseris toetatud.", - "image_previews": "Animeeritud Piltide Eelvaated", - "image_previews_tooltip": "Animeeritud WebP eelvaated, nõutavad ainult siis, kui Eelvaate Tüüp on seatud väärtusele Animeeritud Pilt.", - "interactive_heatmap_speed": "Looge interaktiivsete stseenide jaoks soojuskaarte ja kiirusi", - "marker_image_previews": "Markeri Animeeritud Piltide Eelvaated", - "marker_image_previews_tooltip": "Animeeritud markeri WebP eelvaated, nõutavad ainult siis, kui Eelvaate Tüüp on seatud väärtusele Animeeritud Pilt.", - "marker_screenshots": "Markeri Ekraanipildid", - "marker_screenshots_tooltip": "Marker staatilised JPG-kujutised, nõutav ainult siis, kui Eelvaate Tüüp on seatud väärtusele Staatiline Pilt.", - "markers": "Markeri Eelvaated", - "markers_tooltip": "20-sekundilised videod, mis algavad etteantud ajakoodiga.", - "override_preview_generation_options": "Eelvaate Genereerimise Valikute Ülekirjutamine", - "override_preview_generation_options_desc": "Eelvaate Genereerimise Sätete üle kirjutamine selle operatsiooni jaoks. Vaikeseaded määratakse jaotises Süsteem -> Eelvaate Genereerimine.", - "overwrite": "Kirjuta üle olemasolevad failid", - "phash": "Nähtavad hashid (duplikaatide eemaldamiseks)", - "preview_exclude_end_time_desc": "Välista stseeni eelvaadetest viimased x sekundid. See võib olla väärtus sekundites või protsent (nt 2%) stseeni kogukestusest.", - "preview_exclude_end_time_head": "Välista lõpuaeg", - "preview_exclude_start_time_desc": "Välista esimesed x sekundid stseeni eelvaadetest. 
See võib olla väärtus sekundites või protsent (nt 2%) stseeni kogukestusest.", - "preview_exclude_start_time_head": "Välista algusaeg", - "preview_generation_options": "Eelvaate Genereerimise Sätted", - "preview_options": "Eelvaate Sätted", - "preview_preset_desc": "Eelseadistus reguleerib eelvaate genereerimise suurust, kvaliteeti ja kodeerimisaega. Eelseadistused peale „aeglase” on väheneva tootlikkusega ja neid ei soovitata.", - "preview_preset_head": "Eelvaate kodeeringu eelseadistus", - "preview_seg_count_desc": "Eelvaatefailide segmentide arv.", - "preview_seg_count_head": "Eelvaates olevate segmentide arv", - "preview_seg_duration_desc": "Iga eelvaate lõigu kestus sekundites.", - "preview_seg_duration_head": "Eelvaate segmendi kestus", - "sprites": "Stseenipuhastuse Spraidid", - "sprites_tooltip": "Spraidid (stseenipuhasti jaoks)", - "transcodes": "Ümbertöötlemine", - "transcodes_tooltip": "Toetamata videovormingute MP4-konversioonid", - "video_previews": "Eelvaated", - "video_previews_tooltip": "Video eelvaated, mis esitatakse kursorit stseeni kohal hoides" + "ethnicity": "Rahvus", + "existing_value": "eksisteeriv väärtus", + "eye_color": "Silmavärv", + "fake_tits": "Võltsrinnad", + "false": "Väär", + "favourite": "Lemmik", + "file": "fail", + "file_count": "Failide Arv", + "file_info": "Faili Info", + "file_mod_time": "Faili Muutmise Aeg", + "files": "failid", + "files_amount": "{value} faili", + "filesize": "Faili Suurus", + "filter": "Filter", + "filter_name": "Filtri nimi", + "filters": "Filtrid", + "folder": "Kaust", + "framerate": "Kaadrisagedus", + "frames_per_second": "{value} kaadrit sekundis", + "front_page": { + "types": { + "premade_filter": "Eelsätestatud Filter", + "saved_filter": "Salvestatud Filter" + } }, - "scenes_found": "Leiti {count} stseeni", - "scrape_entity_query": "{entity_type} Kraapija Päring", - "scrape_entity_title": "{entity_type} Kraapija Tulemused", - "scrape_results_existing": "Eksisteeriv", - "scrape_results_scraped": 
"Kraabitud", - "set_image_url_title": "Pildi URL", - "unsaved_changes": "Salvestamata muudatused. Kas soovid kindlasti lahkuda?" - }, - "dimensions": "Dimensioonid", - "director": "Režissöör", - "disambiguation": "Ühesõnastamine", - "display_mode": { - "grid": "Võrgustik", - "list": "Nimekiri", - "tagger": "Sildistaja", - "unknown": "Teadmata", - "wall": "Sein" - }, - "donate": "Anneta", - "dupe_check": { - "description": "Täpsetest madalamate tasemete arvutamine võib võtta kauem aega. Valepositiivsed tulemused võidakse tagastada ka madalamal täpsustasemel.", - "duration_diff": "Maksimaalse Pikkuse Vahe", - "duration_options": { - "any": "Kõik", - "equal": "Võrdne" + "galleries": "Galeriid", + "gallery": "Galerii", + "gallery_count": "Galeriide Arv", + "gender": "Sugu", + "gender_types": { + "FEMALE": "Naine", + "INTERSEX": "Intersooline", + "MALE": "Mees", + "NON_BINARY": "Mittebinaarne", + "TRANSGENDER_FEMALE": "Transnaine", + "TRANSGENDER_MALE": "Transmees" }, - "found_sets": "{setCount, plural, one{# duplikaat leitud.} other {# duplikaati leitud.}}", - "options": { - "exact": "Täpselt", - "high": "Kõrge", - "low": "Madal", - "medium": "Keskmine" + "hair_color": "Juuksevärv", + "handy_connection_status": { + "connecting": "Ühendan", + "disconnected": "Lahti ühendatud", + "error": "Handyga ühendamisel tekkis viga", + "missing": "Kadunud", + "ready": "Valmis", + "syncing": "Serveriga sünkroniseerimine", + "uploading": "Skripti üleslaadimine" }, - "search_accuracy_label": "Otsingu Täpsus", - "title": "Duplikaatstseenid" - }, - "duplicated_phash": "Duplikeeritud (phash)", - "duration": "Pikkus", - "effect_filters": { - "aspect": "Aspekt", - "blue": "Sinine", - "blur": "Hägusta", - "brightness": "Eredus", - "contrast": "Kontrast", - "gamma": "Gamma", - "green": "Roheline", - "hue": "Värvitoon", - "name": "Filtrid", - "name_transforms": "Muutused", - "red": "Punane", - "reset_filters": "Lähtesta Filtrid", - "reset_transforms": "Lähesta Muutused", - "rotate": "Pööra", 
- "rotate_left_and_scale": "Pööra Vasakule & Skaleeri", - "rotate_right_and_scale": "Pööra Paremale & Skaleeri", - "saturation": "Saturatsioon", - "scale": "Suurus", - "warmth": "Soojus" - }, - "empty_server": "Sellel lehel soovituste nägemiseks lisage oma serverisse mõned stseenid.", - "errors": { - "image_index_greater_than_zero": "Pildi indeks peab olema suurem kui 0", - "lazy_component_error_help": "Kui uuendasid Stashi hiljuti, palun taaslae leht või tühjenda oma brauseri cache.", - "something_went_wrong": "Midagi läks valesti." - }, - "ethnicity": "Rahvus", - "existing_value": "eksisteeriv väärtus", - "eye_color": "Silmavärv", - "fake_tits": "Võltsrinnad", - "false": "Väär", - "favourite": "Lemmik", - "file": "fail", - "file_count": "Failide Arv", - "file_info": "Faili Info", - "file_mod_time": "Faili Muutmise Aeg", - "files": "failid", - "files_amount": "{value} faili", - "filesize": "Faili Suurus", - "filter": "Filter", - "filter_name": "Filtri nimi", - "filters": "Filtrid", - "folder": "Kaust", - "framerate": "Kaadrisagedus", - "frames_per_second": "{value} kaadrit sekundis", - "front_page": { - "types": { - "premade_filter": "Eelsätestatud Filter", - "saved_filter": "Salvestatud Filter" - } - }, - "galleries": "Galeriid", - "gallery": "Galerii", - "gallery_count": "Galeriide Arv", - "gender": "Sugu", - "gender_types": { - "FEMALE": "Naine", - "INTERSEX": "Intersooline", - "MALE": "Mees", - "NON_BINARY": "Mittebinaarne", - "TRANSGENDER_FEMALE": "Transnaine", - "TRANSGENDER_MALE": "Transmees" - }, - "hair_color": "Juuksevärv", - "handy_connection_status": { - "connecting": "Ühendan", - "disconnected": "Lahti ühendatud", - "error": "Handyga ühendamisel tekkis viga", - "missing": "Kadunud", - "ready": "Valmis", - "syncing": "Serveriga sünkroniseerimine", - "uploading": "Skripti üleslaadimine" - }, - "hasChapters": "Sisaldab Episoode", - "hasMarkers": "On Markereid", - "height": "Pikkus", - "height_cm": "Pikkus (cm)", - "help": "Abi", - "ignore_auto_tag": 
"Ignoneeri Automaatset Märkimist", - "image": "Pilt", - "image_count": "Pildiarv", - "image_index": "Pilt #", - "images": "Pildid", - "include_parent_tags": "Kaasa vanem-silte", - "include_sub_studios": "Kaasa tütarstuudioid", - "include_sub_tags": "Kaasa alamsilte", - "instagram": "Instagram", - "interactive": "Interaktiivne", - "interactive_speed": "Interaktiivne kiirus", - "isMissing": "On Kadunud", - "last_played_at": "Viimati Esitatud", - "library": "Kogu", - "loading": { - "generic": "Laen…" - }, - "marker_count": "Markerite Arv", - "markers": "Markerid", - "measurements": "Mõõdud", - "media_info": { - "audio_codec": "Heli Koodek", - "checksum": "Kontrollsumma", - "downloaded_from": "Allalaetud Asukohast", - "hash": "Hash", + "hasChapters": "Sisaldab Episoode", + "hasMarkers": "On Markereid", + "height": "Pikkus", + "height_cm": "Pikkus (cm)", + "help": "Abi", + "ignore_auto_tag": "Ignoneeri Automaatset Märkimist", + "image": "Pilt", + "image_count": "Pildiarv", + "image_index": "Pilt #", + "images": "Pildid", + "include_parent_tags": "Kaasa vanem-silte", + "include_sub_studios": "Kaasa tütarstuudioid", + "include_sub_tags": "Kaasa alamsilte", + "instagram": "Instagram", + "interactive": "Interaktiivne", "interactive_speed": "Interaktiivne kiirus", - "performer_card": { - "age": "{age} {years_old}", - "age_context": "{age} selles stseenis {years_old}" + "isMissing": "On Kadunud", + "last_played_at": "Viimati Esitatud", + "library": "Kogu", + "loading": { + "generic": "Laen…" }, - "phash": "PHash", - "play_count": "Esituste Arv", - "play_duration": "Esitamisaeg", - "stream": "Striim", - "video_codec": "Video Koodek" - }, - "megabits_per_second": "{value} megabitti sekundis", - "metadata": "Metaandmed", - "movie": "Film", - "movie_scene_number": "Filmi Stseeninumber", - "movies": "Filmid", - "name": "Nimi", - "new": "Uus", - "none": "Puudub", - "o_counter": "O-Loendur", - "operations": "Operatsioonid", - "organized": "Organiseeritud", - "pagination": { - 
"first": "Esimene", - "last": "Viimane", - "next": "Järgmine", - "previous": "Eelmine" - }, - "parent_of": "{children} vanem-silt", - "parent_studios": "Vanem-stuudiod", - "parent_tag_count": "Vanem-siltide Arv", - "parent_tags": "Vanem-sildid", - "part_of": "Osa {parent}-st", - "path": "Failitee", - "perceptual_similarity": "Tajutav Sarnasus (phash)", - "performer": "Näitleja", - "performerTags": "Näitleja Sildid", - "performer_age": "Näitleja Vanus", - "performer_count": "Näitlejate Arv", - "performer_favorite": "Lemmiknäitleja", - "performer_image": "Näitleja Pilt", - "performer_tagger": { - "add_new_performers": "Lisa Uusi Näitlejaid", - "any_names_entered_will_be_queried": "Kõigi sisestatud nimede kohta päritakse Stash-kastii kaugeksemplari ja lisatakse, kui need leitakse. Vastena loetakse ainult täpseid vasteid.", - "batch_add_performers": "Lisa Näitlejaid Hunnikus", - "batch_update_performers": "Uuenda Näitlejaid Hunnikus", - "config": { - "active_stash-box_instance": "Aktiivne stash-kasti eksemplar:", - "edit_excluded_fields": "Muuda Välistatud Välju", - "excluded_fields": "Välistatud väljad:", - "no_fields_are_excluded": "Ühtegi välja ei ole välistatud", - "no_instances_found": "Eksemplare ei leitud", - "these_fields_will_not_be_changed_when_updating_performers": "Neid välju näitlejaid uuendades ei muudeta." 
- }, - "current_page": "Käesolev lehekülg", - "failed_to_save_performer": "Näitleja \"{performer}\" salvestamine ebaõnnestus", - "name_already_exists": "Nimi juba eksisteerib", - "network_error": "Võrguviga", - "no_results_found": "Tulemusi ei leitud.", - "number_of_performers_will_be_processed": "{performer_count} näitlejat töödeldakse", - "performer_already_tagged": "Näitleja juba märgitud", - "performer_names_separated_by_comma": "Esinejate nimed on eraldatud komaga", - "performer_selection": "Näitlejate valik", - "performer_successfully_tagged": "Näitleja edukalt märgitud:", - "query_all_performers_in_the_database": "Kõik andmebaasis olevad näitlejad", - "refresh_tagged_performers": "Värskenda märgitud näitlejaid", - "refreshing_will_update_the_data": "Värskendamine uuendab kõigi märgitud näitlejate informatsiooni stash-kasti eksemplaris.", - "status_tagging_job_queued": "Staatus: Märkimise töö lisatud järjekorda", - "status_tagging_performers": "Staatus: Märgin näitlejaid", - "tag_status": "Märgi Staatus", - "to_use_the_performer_tagger": "Näitleja märgistamise kasutamiseks peab stash-kasti eksemplar olema konfigureeritud.", - "untagged_performers": "Märkimata näitlejad", - "update_performer": "Uuenda Näitlejat", - "update_performers": "Uuenda Näitlejaid", - "updating_untagged_performers_description": "Märgistamata esinejate värskendamisel püütakse leida vasteid esinejatele, kellel puudub stashid, ja värskendatakse metaandmeid." 
- }, - "performers": "Näitlejad", - "piercings": "Augustused", - "play_count": "Esitamisarv", - "play_duration": "Esitamisaeg", - "primary_file": "Põhifail", - "queue": "Järjekord", - "random": "Suvaline", - "rating": "Hinnang", - "recently_added_objects": "Hiljuti Lisatud {objects}", - "recently_released_objects": "Hiljuti Avaldatud {objects}", - "release_notes": "Väljalaske Märkmed", - "resolution": "Resolutsioon", - "resume_time": "Jätkamisaeg", - "scene": "Stseen", - "sceneTagger": "Stseeni Sildistaja", - "sceneTags": "Stseeni Sildid", - "scene_code": "Stuudio Kood", - "scene_count": "Stseenide Arv", - "scene_created_at": "Stseen Loodud", - "scene_date": "Stseeni Kuupäev", - "scene_id": "Stseeni ID", - "scene_updated_at": "Stseen Uuendatud", - "scenes": "Stseenid", - "scenes_updated_at": "Stseen Uuendatud", - "search_filter": { - "edit_filter": "Muuda Filtrit", - "name": "Filter", - "saved_filters": "Salvestatud filtrid", - "update_filter": "Uuenda Filtrit" - }, - "second": "Sekund", - "seconds": "Sekundit", - "settings": "Sätted", - "setup": { - "confirm": { - "almost_ready": "Oleme seadistamise lõpuleviimiseks peaaegu valmis. Vaata üle järgmised sätted. Valede väärtuste muutmiseks võite klõpsata tagasi. 
Kui kõik tundub õige, klõpsa süsteemi loomiseks nuppu Kinnita.", - "blobs_directory": "Binaarsete andmete kaust", - "cache_directory": "Cache kaust", - "configuration_file_location": "Konfiguratsioonifaili asukoht:", - "database_file_path": "Andmebaasi faili failitee", - "default_blobs_location": "", - "default_cache_location": "/cache", - "default_db_location": "/stash-go.sqlite", - "default_generated_content_location": "/generated", - "generated_directory": "Genereeritud kaust", - "nearly_there": "Peaaegu kohal!", - "stash_library_directories": "Stashi kogu kaustad" + "marker_count": "Markerite Arv", + "markers": "Markerid", + "measurements": "Mõõdud", + "media_info": { + "audio_codec": "Heli Koodek", + "checksum": "Kontrollsumma", + "downloaded_from": "Allalaetud Asukohast", + "hash": "Hash", + "interactive_speed": "Interaktiivne kiirus", + "performer_card": { + "age": "{age} {years_old}", + "age_context": "{age} selles stseenis {years_old}" + }, + "phash": "PHash", + "play_count": "Esituste Arv", + "play_duration": "Esitamisaeg", + "stream": "Striim", + "video_codec": "Video Koodek" }, - "creating": { - "creating_your_system": "Loon sulle süsteemi", - "ffmpeg_notice": "Kui ffmpeg-i pole veel olemas, ole kannatlik, kuni stash selle alla laadib. Allalaadimise edenemise vaatamiseks vaata konsooli väljundit." + "megabits_per_second": "{value} megabitti sekundis", + "metadata": "Metaandmed", + "movie": "Film", + "movie_scene_number": "Filmi Stseeninumber", + "movies": "Filmid", + "name": "Nimi", + "new": "Uus", + "none": "Puudub", + "o_counter": "O-Loendur", + "operations": "Operatsioonid", + "organized": "Organiseeritud", + "pagination": { + "first": "Esimene", + "last": "Viimane", + "next": "Järgmine", + "previous": "Eelmine" }, - "errors": { - "something_went_wrong": "Oi ei! Midagi läks valesti!", - "something_went_wrong_description": "Kui see näib olevat sisenditega seotud probleem, jätka ja klõpsa nende parandamiseks nuppu Tagasi. 
Vastasel juhul loo viga meie {githubLink}-s või otsi abi kanalist {discordLink}.", - "something_went_wrong_while_setting_up_your_system": "Süsteemi seadistamisel läks midagi valesti. Saime järgmise vea: {error}" + "parent_of": "{children} vanem-silt", + "parent_studios": "Vanem-stuudiod", + "parent_tag_count": "Vanem-siltide Arv", + "parent_tags": "Vanem-sildid", + "part_of": "Osa {parent}-st", + "path": "Failitee", + "perceptual_similarity": "Tajutav Sarnasus (phash)", + "performer": "Näitleja", + "performer_tags": "Näitleja Sildid", + "performer_age": "Näitleja Vanus", + "performer_count": "Näitlejate Arv", + "performer_favorite": "Lemmiknäitleja", + "performer_image": "Näitleja Pilt", + "performer_tagger": { + "add_new_performers": "Lisa Uusi Näitlejaid", + "any_names_entered_will_be_queried": "Kõigi sisestatud nimede kohta päritakse Stash-kastii kaugeksemplari ja lisatakse, kui need leitakse. Vastena loetakse ainult täpseid vasteid.", + "batch_add_performers": "Lisa Näitlejaid Hunnikus", + "batch_update_performers": "Uuenda Näitlejaid Hunnikus", + "config": { + "active_stash-box_instance": "Aktiivne stash-kasti eksemplar:", + "edit_excluded_fields": "Muuda Välistatud Välju", + "excluded_fields": "Välistatud väljad:", + "no_fields_are_excluded": "Ühtegi välja ei ole välistatud", + "no_instances_found": "Eksemplare ei leitud", + "these_fields_will_not_be_changed_when_updating_performers": "Neid välju näitlejaid uuendades ei muudeta." 
+ }, + "current_page": "Käesolev lehekülg", + "failed_to_save_performer": "Näitleja \"{performer}\" salvestamine ebaõnnestus", + "name_already_exists": "Nimi juba eksisteerib", + "network_error": "Võrguviga", + "no_results_found": "Tulemusi ei leitud.", + "number_of_performers_will_be_processed": "{performer_count} näitlejat töödeldakse", + "performer_already_tagged": "Näitleja juba märgitud", + "performer_names_separated_by_comma": "Esinejate nimed on eraldatud komaga", + "performer_selection": "Näitlejate valik", + "performer_successfully_tagged": "Näitleja edukalt märgitud:", + "query_all_performers_in_the_database": "Kõik andmebaasis olevad näitlejad", + "refresh_tagged_performers": "Värskenda märgitud näitlejaid", + "refreshing_will_update_the_data": "Värskendamine uuendab kõigi märgitud näitlejate informatsiooni stash-kasti eksemplaris.", + "status_tagging_job_queued": "Staatus: Märkimise töö lisatud järjekorda", + "status_tagging_performers": "Staatus: Märgin näitlejaid", + "tag_status": "Märgi Staatus", + "to_use_the_performer_tagger": "Näitleja märgistamise kasutamiseks peab stash-kasti eksemplar olema konfigureeritud.", + "untagged_performers": "Märkimata näitlejad", + "update_performer": "Uuenda Näitlejat", + "update_performers": "Uuenda Näitlejaid", + "updating_untagged_performers_description": "Märgistamata esinejate värskendamisel püütakse leida vasteid esinejatele, kellel puudub stashid, ja värskendatakse metaandmeid." 
}, - "folder": { - "file_path": "Failitee", - "up_dir": "Kaust üles" + "performers": "Näitlejad", + "piercings": "Augustused", + "play_count": "Esitamisarv", + "play_duration": "Esitamisaeg", + "primary_file": "Põhifail", + "queue": "Järjekord", + "random": "Suvaline", + "rating": "Hinnang", + "recently_added_objects": "Hiljuti Lisatud {objects}", + "recently_released_objects": "Hiljuti Avaldatud {objects}", + "release_notes": "Väljalaske Märkmed", + "resolution": "Resolutsioon", + "resume_time": "Jätkamisaeg", + "scene": "Stseen", + "sceneTagger": "Stseeni Sildistaja", + "scene_tags": "Stseeni Sildid", + "scene_code": "Stuudio Kood", + "scene_count": "Stseenide Arv", + "scene_created_at": "Stseen Loodud", + "scene_date": "Stseeni Kuupäev", + "scene_id": "Stseeni ID", + "scene_updated_at": "Stseen Uuendatud", + "scenes": "Stseenid", + "scenes_updated_at": "Stseen Uuendatud", + "search_filter": { + "edit_filter": "Muuda Filtrit", + "name": "Filter", + "saved_filters": "Salvestatud filtrid", + "update_filter": "Uuenda Filtrit" }, - "github_repository": "Githubi hoidla", - "migrate": { - "backup_database_path_leave_empty_to_disable_backup": "Andmebaasi varundamise tee (varundamise keelamiseks jäta tühjaks):", - "backup_recommended": "Enne migreerimist on soovitatav olemasolev andmebaas varundada. Saame seda sinu eest teha, tehes andmebaasist koopia kausta {defaultBackupPath}.", - "migrating_database": "Andmebaasi migreerimine", - "migration_failed": "Migreerimine ebaõnnestus", - "migration_failed_error": "Andmebaasi migreerimisel ilmnes järgmine tõrge:", - "migration_failed_help": "Te vajalikud parandused ja proovige uuesti. Vastasel juhul ava viga meie {githubLink}-is või otsi abi kanalist {discordLink}.", - "migration_irreversible_warning": "Skeemi migratsiooniprotsess ei ole taastatav. 
Kui migratsioon on tehtud, ei ühildu andmebaas enam stashi varasemate versioonidega.", - "migration_notes": "Migratsioonimärkmed", - "migration_required": "Migratsioon on nõutud", - "perform_schema_migration": "Skeemi migreerimine", - "schema_too_old": "Praegune stashi andmebaas on skeemi {databaseSchema} versioonil ja see tuleb üle viia versioonile {appSchema}. See Stashi versioon ei tööta ilma andmebaasi migreerimata. Kui sa ei soovi migreerida, pead üle minema versioonile, mis vastab teie andmebaasi skeemile." + "second": "Sekund", + "seconds": "Sekundit", + "settings": "Sätted", + "setup": { + "confirm": { + "almost_ready": "Oleme seadistamise lõpuleviimiseks peaaegu valmis. Vaata üle järgmised sätted. Valede väärtuste muutmiseks võite klõpsata tagasi. Kui kõik tundub õige, klõpsa süsteemi loomiseks nuppu Kinnita.", + "blobs_directory": "Binaarsete andmete kaust", + "cache_directory": "Cache kaust", + "configuration_file_location": "Konfiguratsioonifaili asukoht:", + "database_file_path": "Andmebaasi faili failitee", + "default_blobs_location": "", + "default_cache_location": "/cache", + "default_db_location": "/stash-go.sqlite", + "default_generated_content_location": "/generated", + "generated_directory": "Genereeritud kaust", + "nearly_there": "Peaaegu kohal!", + "stash_library_directories": "Stashi kogu kaustad" + }, + "creating": { + "creating_your_system": "Loon sulle süsteemi", + "ffmpeg_notice": "Kui ffmpeg-i pole veel olemas, ole kannatlik, kuni stash selle alla laadib. Allalaadimise edenemise vaatamiseks vaata konsooli väljundit." + }, + "errors": { + "something_went_wrong": "Oi ei! Midagi läks valesti!", + "something_went_wrong_description": "Kui see näib olevat sisenditega seotud probleem, jätka ja klõpsa nende parandamiseks nuppu Tagasi. Vastasel juhul loo viga meie {githubLink}-s või otsi abi kanalist {discordLink}.", + "something_went_wrong_while_setting_up_your_system": "Süsteemi seadistamisel läks midagi valesti. 
Saime järgmise vea: {error}" + }, + "folder": { + "file_path": "Failitee", + "up_dir": "Kaust üles" + }, + "github_repository": "Githubi hoidla", + "migrate": { + "backup_database_path_leave_empty_to_disable_backup": "Andmebaasi varundamise tee (varundamise keelamiseks jäta tühjaks):", + "backup_recommended": "Enne migreerimist on soovitatav olemasolev andmebaas varundada. Saame seda sinu eest teha, tehes andmebaasist koopia kausta {defaultBackupPath}.", + "migrating_database": "Andmebaasi migreerimine", + "migration_failed": "Migreerimine ebaõnnestus", + "migration_failed_error": "Andmebaasi migreerimisel ilmnes järgmine tõrge:", + "migration_failed_help": "Tee vajalikud parandused ja proovi uuesti. Vastasel juhul ava viga meie {githubLink}-is või otsi abi kanalist {discordLink}.", + "migration_irreversible_warning": "Skeemi migratsiooniprotsess ei ole taastatav. Kui migratsioon on tehtud, ei ühildu andmebaas enam stashi varasemate versioonidega.", + "migration_notes": "Migratsioonimärkmed", + "migration_required": "Migratsioon on nõutud", + "perform_schema_migration": "Skeemi migreerimine", + "schema_too_old": "Praegune stashi andmebaas on skeemi {databaseSchema} versioonil ja see tuleb üle viia versioonile {appSchema}. See Stashi versioon ei tööta ilma andmebaasi migreerimata. Kui sa ei soovi migreerida, pead üle minema versioonile, mis vastab teie andmebaasi skeemile." + }, + "paths": { + "database_filename_empty_for_default": "andmebaasi failinimi (vaikimisi tühi)", + "description": "Järgmisena peame kindlaks määrama, kust leida su pornokogu ja kuhu salvestada stashi andmebaas, genereeritud failid ja cache. Neid sätteid saab hiljem vajadusel muuta.", + "path_to_cache_directory_empty_for_default": "tee cache kaustani (tühi vaikeseadeks)", + "path_to_generated_directory_empty_for_default": "genereeritud kataloogi tee (vaikimisi tühi)", + "set_up_your_paths": "Seadista oma failiteed", + "stash_alert": "Ühtegi kogu teed pole valitud. 
Stash ei saa skannida mitte ühtegi meediumifaili. Oled sa kindel?", + "where_can_stash_store_blobs": "Kus saab Stash hoida oma andmebaasi binaarseid andmeid?", + "where_can_stash_store_blobs_description": "Stash saab hoida binaarseid andmeid nagu stseeni kaanepilte, esinejate, stuudiote ja siltide pilte kas andmebaasis või failisüsteemis. Vaikimisi salvestab Stash neid andmeid failisüsteemi alamkausta blobs. Kui tahad seda muuta, palun sisesta absoluutne või relatiivne (hetke töökaustaga) tee. Stash loob selle kausta, kui seda juba ei eksisteeri.", + "where_can_stash_store_blobs_description_addendum": "Alternatiivselt, kui tahad hoida neid andmeid andmebaasis, saad jätta selle välja tühjaks. NB: See suurendab su andmebaasi faili ja suurendab andmebaasi migratsiooni aega.", + "where_can_stash_store_cache_files": "Kus saab Stash hoida cache faile?", + "where_can_stash_store_cache_files_description": "Mõne funktsionaalsuse, nagu HLS/DASH reaalas transkodeerimine, töötamiseks vajab Stash cache kausta ajutiste failide jaoks. Vaikimisi loob Stash cache kausta mis asub konfiguratsioonifailiga samas kaustas. Kui tahad seda muuta, palun sisesta absoluutne või relatiivne (töökaustaga) tee. Stash loob selle kausta kui seda juba ei eksisteeri.", + "where_can_stash_store_its_database": "Kuhu saab Stash oma andmebaasi salvestada?", + "where_can_stash_store_its_database_description": "Stash kasutab su porno metaandmete salvestamiseks SQLite'i andmebaasi. Vaikimisi luuakse see konfiguratsioonifaili sisaldavasse kataloogi kui stash-go.sqlite. Kui soovid seda muuta, sisesta absoluutne või suhteline failinimi (praeguse töökataloogi suhtes).", + "where_can_stash_store_its_database_warning": "HOIATUS: hoides andmebaasi erineval süsteemil kui millel Stash jookseb (nt hoides andmebaasi NASil kui Stash jookseb teisel arvutil) on mitte toetatud! 
SQLite ei ole mõeldud kasutamiseks üle võrgu ja selle proovimine võib väga kergesti viia andmebaasi korrupeerumiseni.", + "where_can_stash_store_its_generated_content": "Kus saab Stash oma genereeritud sisu salvestada?", + "where_can_stash_store_its_generated_content_description": "Pisipiltide, eelvaadete ja spraitide pakkumiseks loob Stash pilte ja videoid. See hõlmab ka toetamata failivormingute ümbertöötlemist. Vaikimisi loob Stash konfiguratsioonifaili sisaldavas kaustas genereeritud kausta. Kui soovid muuta seda, kus see loodud meedium salvestatakse, sisesta absoluutne või suhteline failitee (praeguse töökataloogi suhtes). Stash loob selle kausta, kui seda veel pole.", + "where_is_your_porn_located": "Kus su porno asub?", + "where_is_your_porn_located_description": "Lisage oma pornovideoid ja pilte sisaldavad kataloogid. Stash kasutab neid katalooge skannimise ajal videote ja piltide otsimiseks." + }, + "stash_setup_wizard": "Stashi Ülessättimise Viisard", + "success": { + "getting_help": "Abi saamine", + "help_links": "Kui tekib probleeme või on küsimusi või soovitusi, ava viga lehel {githubLink} või küsi kogukonnalt abi kanalis {discordLink}.", + "in_app_manual_explained": "Soovitame tutvuda rakendusesisese juhendiga, millele pääseb juurde ekraani paremas ülanurgas olevast ikoonist, mis näeb välja järgmine: {icon}", + "next_config_step_one": "Järgmisena suuname su konfiguratsioonilehele. 
See leht võimaldab sul kohandada, milliseid faile lisada ja välja jätta, määrata oma süsteemi kaitsmiseks kasutajanime ja parooli ning palju muud.", + "next_config_step_two": "Kui oled seadetega rahul, võite alustada sisu Stashi skannimist, klõpsates valikul {localized_task} ja seejärel valikul {localized_scan}.", + "open_collective": "Vaadake meie {open_collective_link}, et näha, kuidas saad aidata kaasa Stashi jätkuvale arendamisele.", + "support_us": "Toeta meid", + "thanks_for_trying_stash": "Aitäh Stashi proovimise eest!", + "welcome_contrib": "Ootame ka panust koodi (veaparandused, täiustused ja uued funktsioonid), testimise, veaaruannete, parendus- ja funktsioonitaotluste ning kasutajatoe kujul. Üksikasjad leiad rakendusesisese juhendi jaotisest Contribution.", + "your_system_has_been_created": "Edukas! Su süsteem on loodud!" + }, + "welcome": { + "config_path_logic_explained": "Stash püüab esmalt leida oma konfiguratsioonifaili (config.yml) praegusest töökataloogist ja kui ta seda sealt ei leia, läheb tagasi kausta $HOME/.stash/config. yml (Windowsis on see %USERPROFILE%\\.stash\\config.yml). Samuti saad panna Stashi lugema konkreetsest konfiguratsioonifailist, käivitades selle suvanditega -c '' või --config ''.", + "in_current_stash_directory": "Kaustas $HOME/.stash", + "in_the_current_working_directory": "Praeguses töökaustas", + "next_step": "Kui oled valmis uue süsteemi seadistamisega alustama, vali, kuhu soovid oma konfiguratsioonifaili salvestada, ja klõpsa nuppu Edasi.", + "store_stash_config": "Kuhu soovid oma Stashi konfiguratsiooni salvestada?", + "unable_to_locate_config": "Kui sa seda näed, siis ei leidnud Stash olemasolevat konfiguratsiooni. See viisard juhendab sind uue konfiguratsiooni seadistamise protsessis.", + "unexpected_explained": "Kui said selle ekraani ootamatult, proovi Stash uuesti käivitada õiges töökataloogis või lipuga -c." 
+ }, + "welcome_specific_config": { + "config_path": "Stash kasutab järgmist konfiguratsioonifaili teed: {path}", + "next_step": "Kui oled valmis uue süsteemi seadistamisega jätkama, klõpsa nuppu Edasi.", + "unable_to_locate_specified_config": "Kui seda näed, siis ei leidnud Stash käsureal ega keskkonnas määratud konfiguratsioonifaili. See viisard juhendab sind uue konfiguratsiooni seadistamise protsessis." + }, + "welcome_to_stash": "Teretulemast Stashi" }, - "paths": { - "database_filename_empty_for_default": "andmebaasi failinimi (vaikimisi tühi)", - "description": "Järgmisena peame kindlaks määrama, kust leida su pornokogu ja kuhu salvestada stashi andmebaas, genereeritud failid ja cache. Neid sätteid saab hiljem vajadusel muuta.", - "path_to_blobs_directory_empty_for_database": "tee blobide kaustani (tühi, et kasutada andmebaasi)", - "path_to_cache_directory_empty_for_default": "tee cache kaustani (tühi vaikeseadeks)", - "path_to_generated_directory_empty_for_default": "genereeritud kataloogi tee (vaikimisi tühi)", - "set_up_your_paths": "Seadista oma failiteed", - "stash_alert": "Ühtegi kogu teed pole valitud. Stash ei saa skannida mitte ühtegi meediumifaili. Oled sa kindel?", - "where_can_stash_store_blobs": "Kus saab Stash hoida oma andmebaasi binaarseid andmeid?", - "where_can_stash_store_blobs_description": "Stash saab hoida binaarseid andmeid nagu stseeni kaanepilte, esinejate, stuudiote ja siltide pilte kas andmebaasis või failisüsteemis. Vaikimisi salvestab Stash neid andmeid failsüsteemi alamkausta blobs. Kui tahad seda muuta, palun sisesta absoluutne või relatiivne (hetke töökaustaga) tee. Stash loob selle kausta, kui seda juba ei eksisteeri.", - "where_can_stash_store_blobs_description_addendum": "Alternatiivselt, kui tahad hoida neid anmeid andmebaasis, saad jätta selle välja tühjaks. 
NB: See suurendab su andmebaasi fail ja suurendab andmebaasi migratsiooni aega.", - "where_can_stash_store_cache_files": "Kus saab Stash hoida cache faile?", - "where_can_stash_store_cache_files_description": "Mõne funktsionaalsuse, nagu HLS/DASH reaalas transkodeerimine, töötamiseks vajab Stash cache kausta ajutiste failide jaoks. Vaikimisi loob Stash cache kausta mis asub konfiguratsioonifailiga samas kaustas. Kui tahad seda muuta, palun sisesta absoluutne või relatiivne (töökaustaga) tee. Stash loob selle kausta kui seda juba ei eksisteeri.", - "where_can_stash_store_its_database": "Kuhu saab Stash oma andmebaasi salvestada?", - "where_can_stash_store_its_database_description": "Stash kasutab su porno metaandmete salvestamiseks SQLite'i andmebaasi. Vaikimisi luuakse see konfiguratsioonifaili sisaldavasse kataloogi kui stash-go.sqlite. Kui soovid seda muuta, sisesta absoluutne või suhteline failinimi (praeguse töökataloogi suhtes).", - "where_can_stash_store_its_database_warning": "HOIATUS: hoides andmebaasi erineval süsteemil kui millel Stash jookseb (nt hoides andmebaasi NASil kui Stash jookseb teisel arvutil) on mitte toetatud! SQLite ei ole mõeldud kasutamiseks üle võrgu ja selle proovimine võib väga kergesti viia andmebaasi korrupeerumiseni.", - "where_can_stash_store_its_generated_content": "Kus saab Stash oma genereeritud sisu salvestada?", - "where_can_stash_store_its_generated_content_description": "Pisipiltide, eelvaadete ja spraitide pakkumiseks loob Stash pilte ja videoid. See hõlmab ka toetamata failivormingute ümbertöötlemist. Vaikimisi loob Stash konfiguratsioonifaili sisaldavas kaustas genereeritud kausta. Kui soovid muuta seda, kus see loodud meedium salvestatakse, sisesta absoluutne või suhteline failitee (praeguse töökataloogi suhtes). Stash loob selle kausta, kui seda veel pole.", - "where_is_your_porn_located": "Kus su porno asub?", - "where_is_your_porn_located_description": "Lisage oma pornovideoid ja pilte sisaldavad kataloogid. 
Stash kasutab neid katalooge skanimise ajal videote ja piltide otsimiseks." + "stash_id": "Stashi ID", + "stash_id_endpoint": "Stash ID Lõpp-punkt", + "stash_ids": "Stashi ID-d", + "stashbox": { + "go_review_draft": "Mustandi ülevaatamiseks mine saidile {endpoint_name}.", + "selected_stash_box": "Valitud Stash-Kasti lõpp-punkt", + "submission_failed": "Esitamine ebaõnnestus", + "submission_successful": "Esitamine õnnestus", + "submit_update": "On juba olemas kohas {endpoint_name}" }, - "stash_setup_wizard": "Stashi Ülessättimise Viisard", - "success": { - "getting_help": "Abi saamine", - "help_links": "Kui tekib probleeme või on küsimusi või soovitusi, ava viga lehel {githubLink} või küsi kogukonnalt abi kanalis {discordLink}.", - "in_app_manual_explained": "Soovitame tutvuda rakendusesisese juhendiga, millele pääseb juurde ekraani paremas ülanurgas olevast ikoonist, mis näeb välja järgmine: {icon}", - "next_config_step_one": "Järgmisena suuname su konfiguratsioonilehele. See leht võimaldab sul kohandada, milliseid faile lisada ja välja jätta, määrata oma süsteemi kaitsmiseks kasutajanime ja parooli ning palju muud.", - "next_config_step_two": "Kui oled seadetega rahul, võite alustada sisu Stashi skannimist, klõpsates valikul {localized_task} ja seejärel valikul {localized_scan}.", - "open_collective": "Vaadake meie {open_collective_link}, et näha, kuidas saad aidata kaasa Stashi jätkuvale arendamisele.", - "support_us": "Toeta meid", - "thanks_for_trying_stash": "Aitäh Stashi proovimise eest!", - "welcome_contrib": "Ootame ka panust koodi (veaparandused, täiustused ja uued funktsioonid), testimise, veaaruannete, parendus- ja funktsioonitaotluste ning kasutajatoe kujul. Üksikasjad leiad rakendusesisese juhendi jaotisest Contribution.", - "your_system_has_been_created": "Edukas! Su süsteem on loodud!" 
+ "statistics": "Statistika", + "stats": { + "image_size": "Piltide suurus", + "scenes_duration": "Stseenide pikkus", + "scenes_size": "Stseenide suurus" }, - "welcome": { - "config_path_logic_explained": "Stash püüab esmalt leida oma konfiguratsioonifaili (config.yml) praegusest töökataloogist ja kui ta seda sealt ei leia, läheb tagasi kausta $HOME/.stash/config. yml (Windowsis on see %USERPROFILE%\\.stash\\config.yml). Samuti saad panna Stashi lugema konkreetsest konfiguratsioonifailist, käivitades selle suvanditega -c '' või --config ''.", - "in_current_stash_directory": "Kaustas $HOME/.stash", - "in_the_current_working_directory": "Praeguses töökaustas", - "next_step": "Kui oled valmis uue süsteemi seadistamisega alustama, vali, kuhu soovid oma konfiguratsioonifaili salvestada, ja klõpsa nuppu Edasi.", - "store_stash_config": "Kuhu soovid oma Stashi konfiguratsiooni salvestada?", - "unable_to_locate_config": "Kui sa seda näed, siis ei leidnud Stash olemasolevat konfiguratsiooni. See viisard juhendab sind uue konfiguratsiooni seadistamise protsessis.", - "unexpected_explained": "Kui said selle ekraani ootamatult, proovi Stash uuesti käivitada õiges töökataloogis või lipuga -c." 
+ "status": "Staatus: {statusText}", + "studio": "Stuudio", + "studio_depth": "Tasemed (tühi kõige jaoks)", + "studios": "Stuudiod", + "sub_tag_count": "Alam-Siltide Arv", + "sub_tag_of": "{parent}-i alam-silt", + "sub_tags": "Alam-Sildid", + "subsidiary_studios": "Tütarstuudiod", + "synopsis": "Sisukokkuvõte", + "tag": "Silt", + "tag_count": "Siltide Arv", + "tags": "Sildid", + "tattoos": "Tatoveeringud", + "title": "Pealkiri", + "toast": { + "added_entity": "Lisatud {count, plural, one {{singularEntity}} other {{pluralEntity}}}", + "added_generation_job_to_queue": "Genereerimistöö lisatud järjekorda", + "created_entity": "Loodud {entity}", + "default_filter_set": "Vaikimisi filtrikomplekt", + "delete_past_tense": "Kustutatud {count, plural, one {{singularEntity}} other {{pluralEntity}}}", + "generating_screenshot": "Ekraanipildi genereerimine…", + "image_index_too_large": "Error: Pildi index on suurem kui Galeriis olevate piltide arv", + "merged_scenes": "Ühendatud stseenid", + "merged_tags": "Sildid ühendatud", + "reassign_past_tense": "Fail ümbermääratud", + "removed_entity": "Eemaldatud {count, plural, one {{singularEntity}} other {{pluralEntity}}}", + "rescanning_entity": "Skannin uuesti {count, plural, one {{singularEntity}} other {{pluralEntity}}}…", + "saved_entity": "Salvestatud {entity}", + "started_auto_tagging": "Alustasin automaatset märkimist", + "started_generating": "Alustasin genereerimist", + "started_importing": "Alustasin importimist", + "updated_entity": "Uuendatud {entity}" }, - "welcome_specific_config": { - "config_path": "Stash kasutab järgmist konfiguratsioonifaili teed: {path}", - "next_step": "Kui oled valmis uue süsteemi seadistamisega jätkama, klõpsa nuppu Edasi.", - "unable_to_locate_specified_config": "Kui seda näed, siis ei leidnud Stash käsureal ega keskkonnas määratud konfiguratsioonifaili. See viisard juhendab sind uue konfiguratsiooni seadistamise protsessis." 
+ "total": "Kokku", + "true": "Tõene", + "twitter": "Twitter", + "type": "Tüüp", + "updated_at": "Viimati Uuendatud", + "url": "URL", + "validation": { + "aliases_must_be_unique": "aliased peavad olema erilised", + "date_invalid_form": "${path} peab olema AAAA-KK-PP vormis", + "required": "${path} on nõutud väli" }, - "welcome_to_stash": "Teretulemast Stashi" - }, - "stash_id": "Stashi ID", - "stash_id_endpoint": "Stash ID Lõpp-punkt", - "stash_ids": "Stashi ID-d", - "stashbox": { - "go_review_draft": "Mustandi ülevaatamiseks mine saidile {endpoint_name}.", - "selected_stash_box": "Valitud Stash-Kasti lõpp-punkt", - "submission_failed": "Esitamine ebaõnnestus", - "submission_successful": "Esitamine õnnestus", - "submit_update": "On juba olemas kohas {endpoint_name}" - }, - "statistics": "Statistika", - "stats": { - "image_size": "Piltide suurus", - "scenes_duration": "Stseenide pikkus", - "scenes_size": "Stseenide suurus" - }, - "status": "Staatus: {statusText}", - "studio": "Stuudio", - "studio_depth": "Tasemed (tühi kõige jaoks)", - "studios": "Stuudiod", - "sub_tag_count": "Alam-Siltide Arv", - "sub_tag_of": "{parent}-i alam-silt", - "sub_tags": "Alam-Sildid", - "subsidiary_studios": "Tütarstuudiod", - "synopsis": "Sisukokkuvõte", - "tag": "Silt", - "tag_count": "Siltide Arv", - "tags": "Sildid", - "tattoos": "Tatoveeringud", - "title": "Pealkiri", - "toast": { - "added_entity": "Lisatud {count, plural, one {{singularEntity}} other {{pluralEntity}}}", - "added_generation_job_to_queue": "Genereerimistöö lisatud järjekorda", - "created_entity": "Loodud {entity}", - "default_filter_set": "Vaikimisi filtrikomplekt", - "delete_past_tense": "Kustutatud {count, plural, one {{singularEntity}} other {{pluralEntity}}}", - "generating_screenshot": "Ekraanipildi genereerimine…", - "image_index_too_large": "Error: Pildi index on suurem kui Galeriis olevate piltide arv", - "merged_scenes": "Ühendatud stseenid", - "merged_tags": "Sildid ühendatud", - "reassign_past_tense": 
"Fail ümbermääratud", - "removed_entity": "Eemaldatud {count, plural, one {{singularEntity}} other {{pluralEntity}}}", - "rescanning_entity": "Skannin uuesti {count, plural, one {{singularEntity}} other {{pluralEntity}}}…", - "saved_entity": "Salvestatud {entity}", - "started_auto_tagging": "Alustasin automaatset märkimist", - "started_generating": "Alustasin genereerimist", - "started_importing": "Alustasin importimist", - "updated_entity": "Uuendatud {entity}" - }, - "total": "Kokku", - "true": "Tõene", - "twitter": "Twitter", - "type": "Tüüp", - "updated_at": "Viimati Uuendatud", - "url": "URL", - "validation": { - "aliases_must_be_unique": "aliased peavad olema erilised", - "date_invalid_form": "${path} peab olema AAAA-KK-PP vormis", - "required": "${path} on nõutud väli" - }, - "videos": "Videod", - "view_all": "Vaata Kõiki", - "weight": "Kaal", - "weight_kg": "Kaal (kg)", - "years_old": "aastat vana", - "zip_file_count": "Zip Failide Arv" + "videos": "Videod", + "view_all": "Vaata Kõiki", + "weight": "Kaal", + "weight_kg": "Kaal (kg)", + "years_old": "aastat vana", + "zip_file_count": "Zip Failide Arv" } diff --git a/ui/v2.5/src/locales/fi-FI.json b/ui/v2.5/src/locales/fi-FI.json index 86efb367cfa..20e03e58431 100644 --- a/ui/v2.5/src/locales/fi-FI.json +++ b/ui/v2.5/src/locales/fi-FI.json @@ -870,7 +870,7 @@ "path": "Polku", "perceptual_similarity": "Aistinvarainen samankaltaisuus (phash)", "performer": "Esiintyjä", - "performerTags": "Esiintyjien tunnisteet", + "performer_tags": "Esiintyjien tunnisteet", "performer_age": "Esiintyjän ikä", "performer_count": "Esiintyjien määrä", "performer_favorite": "Esiintyjä suosikeissa", @@ -914,7 +914,7 @@ "resolution": "Resoluutio", "scene": "Kohtaus", "sceneTagger": "Kohtauksien tunnistetila", - "sceneTags": "Kohtauksen tunnisteet", + "scene_tags": "Kohtauksen tunnisteet", "scene_code": "Studiokoodi", "scene_count": "Kohtauksien määrä", "scene_created_at": "Kohtaus luotu", diff --git a/ui/v2.5/src/locales/fr-FR.json 
b/ui/v2.5/src/locales/fr-FR.json index dde65f13095..bb1e9674f20 100644 --- a/ui/v2.5/src/locales/fr-FR.json +++ b/ui/v2.5/src/locales/fr-FR.json @@ -8,6 +8,7 @@ "allow_temporarily": "Autoriser temporairement", "anonymise": "Anonymiser", "apply": "Appliquer", + "assign_stashid_to_parent_studio": "Attribuer l'identifiant Stash au studio parent existant et actualiser les métadonnées", "auto_tag": "Étiquetage automatique", "backup": "Sauvegarder", "browse_for_image": "Sélectionner une image…", @@ -24,6 +25,7 @@ "create_chapters": "Créer un chapitre", "create_entity": "Créer {entityType}", "create_marker": "Créer un marqueur", + "create_parent_studio": "Créer un studio parent", "created_entity": "Créé {entity_type} : {entity_name}", "customise": "Personnaliser", "delete": "Supprimer", @@ -38,6 +40,7 @@ "download_backup": "Télécharger une sauvegarde", "edit": "Éditer", "edit_entity": "Éditer {entityType}", + "encoding_image": "Encodage de l'image", "export": "Exporter", "export_all": "Tout exporter…", "find": "Rechercher", @@ -67,6 +70,7 @@ "not_running": "arrêt", "open_in_external_player": "Ouvrir dans un lecteur externe", "open_random": "Ouvrir au hasard", + "optimise_database": "Optimiser la base de données", "overwrite": "Écraser", "play_random": "Lecture aléatoire", "play_selected": "Lire la sélection", @@ -129,6 +133,7 @@ "also_known_as": "Également connu comme", "appears_with": "Apparaît avec", "ascending": "Ascendant", + "audio_codec": "Codec audio", "average_resolution": "Résolution moyenne", "between_and": "et", "birth_year": "Année de naissance", @@ -454,10 +459,18 @@ "include_male_performers": "Inclure les performeurs masculins", "set_cover_images": "Définir les vignettes", "set_organized": "Définir le drapeau organisé", + "skip_multiple_matches": "Ignorer les correspondances qui ont plusieurs résultats", + "skip_multiple_matches_tooltip": "Si cette option n'est pas activée et que plusieurs résultats sont retournés, un seul sera aléatoirement choisi pour 
correspondre", + "skip_single_name_performers": "Ignorer les performeurs à nom unique sans désambiguïsation", + "skip_single_name_performers_tooltip": "Si cette option n'est pas activée, les performeurs qui sont souvent génériques, comme Samantha ou Olga, correspondront", "source": "Source", "source_options": "Options pour {source}", "sources": "Sources", - "strategy": "Stratégie" + "strategy": "Stratégie", + "tag_skipped_matches": "Étiqueter les correspondances ignorées avec", + "tag_skipped_matches_tooltip": "Créer une étiquette telle que \"Identifier : plusieurs correspondances\" filtrable dans la vue Étiquetage de scène et déterminer la bonne correspondance manuellement", + "tag_skipped_performer_tooltip": "Créer une étiquette telle que \"Identifier : Performeur à nom unique\" filtrable dans la vue Étiquetage de scène et déterminer comment les traiter", + "tag_skipped_performers": "Étiqueter les performeurs ignorés avec" }, "import_from_exported_json": "Importation à partir du JSON exporté dans le répertoire des métadonnées. Efface la base de données existante.", "incremental_import": "Importation incrémentielle à partir d'un fichier zip d'exportation fourni.", @@ -475,6 +488,8 @@ }, "migrations": "Migrations", "only_dry_run": "Effectuer un essai à blanc. Ne rien supprimer", + "optimise_database": "Essayer d'améliorer les performances en analysant et en reconstruisant l'ensemble de la base de données.", + "optimise_database_warning": "Attention : pendant l'exécution de cette tâche, toute opération modifiant la base de données échouera et, selon la taille de la base de données, elle peut prendre plusieurs minutes pour aboutir. 
Elle requiert au minimum autant d'espace disque libre que la taille de votre base de données, mais 1.5x est recommandé.", "plugin_tasks": "Tâches de Plugin", "scan": { "scanning_all_paths": "Analyse tous les chemins", @@ -538,6 +553,21 @@ "skip_opening_browser": "Ne pas ouvrir de navigateur", "skip_opening_browser_on_startup": "Ignorer l'ouverture automatique du navigateur lors du démarrage" }, + "detail": { + "compact_expanded_details": { + "description": "Activée, cette option présentera des détails plus étendus en préservant une présentation compacte", + "heading": "Détails étendus compacts" + }, + "enable_background_image": { + "description": "Afficher l'image d'arrière-plan sur la page Détail.", + "heading": "Activer l'image d'arrière-plan" + }, + "heading": "Page Détail", + "show_all_details": { + "description": "Activée, tous les détails du contenu seront affichés par défaut et chaque élément détaillé tiendra dans une seule colonne", + "heading": "Montrer tous les détails" + } + }, "editing": { "disable_dropdown_create": { "description": "Supprimer la possibilité de créer de nouveaux objets à partir des sélecteurs de liste déroulante", @@ -657,6 +687,7 @@ "description": "Lire la scène suivante de la file d'attente lorsque une vidéo se termine", "heading": "Continuer la liste de lecture par défaut" }, + "enable_chromecast": "Activer Chromecast", "show_scrubber": "Montrer la barre de progression", "track_activity": "Suivre l'activité", "vr_tag": { @@ -702,7 +733,11 @@ } } }, - "title": "Interface utilisateur" + "title": "Interface utilisateur", + "use_stash_hosted_funscript": { + "description": "Activée, les funscripts sont transmis directement de Stash à votre dispositif Handy sans recourir au serveur Handy de tierce partie. 
Nécessite que Stash soit accessible depuis votre dispositif Handy.", + "heading": "Transmettre directement les funscripts" + } } }, "configuration": "Configuration", @@ -811,6 +846,7 @@ "source": "Source" }, "overwrite_filter_confirm": "Êtes-vous sûr de vouloir remplacer la requête sauvegardée existante {entityName} ?", + "performers_found": "{count} performeurs trouvés", "reassign_entity_title": "{count, plural, one {Réaffecté {singularEntity}} other {Réaffectés {pluralEntity}}}", "reassign_files": { "destination": "Réaffecter à" @@ -915,6 +951,7 @@ "errors": { "image_index_greater_than_zero": "L'index de l'image doit être supérieur à 0", "lazy_component_error_help": "Si vous avez récemment mis à jour Stash, merci de recharger la page ou de vider le cache de votre navigateur.", + "loading_type": "Erreur de chargement de {type}", "something_went_wrong": "Quelque chose n'a pas fonctionné." }, "ethnicity": "Ethnicité", @@ -977,6 +1014,7 @@ "include_parent_tags": "Inclure les étiquettes parentes", "include_sub_studios": "Inclure les studios affiliés", "include_sub_tags": "Inclure les étiquettes affiliées", + "index_of_total": "{index} de {total}", "instagram": "Instagram", "interactive": "Interactif", "interactive_speed": "Vitesse interactive", @@ -1023,6 +1061,7 @@ "previous": "Précédente" }, "parent_of": "Parent de {children}", + "parent_studio": "Studio parent", "parent_studios": "Studio parent", "parent_tag_count": "Nombre d'étiquettes parentes", "parent_tags": "Étiquettes parentes", @@ -1033,7 +1072,7 @@ "penis_length_cm": "Longueur du pénis (cm)", "perceptual_similarity": "Similitude perceptuelle (empreinte)", "performer": "Performeurs", - "performerTags": "Étiquettes de performeur", + "performer_tags": "Étiquettes de performeur", "performer_age": "Âge du performeur", "performer_count": "Nombre de performeurs", "performer_favorite": "Performeur favori", @@ -1088,7 +1127,7 @@ "resume_time": "Reprendre le temps", "scene": "Scène", "sceneTagger": "Étiqueteuse de 
scènes", - "sceneTags": "Étiquettes de la scène", + "scene_tags": "Étiquettes de la scène", "scene_code": "Code studio", "scene_count": "Nombre de scènes", "scene_created_at": "Scène créée le", @@ -1204,6 +1243,7 @@ "stashbox": { "go_review_draft": "Allez sur {endpoint_name} pour examiner l'ébauche.", "selected_stash_box": "Point de terminaison Stash-Box sélectionné", + "source": "Source Stash-Box", "submission_failed": "Envoi échoué", "submission_successful": "Envoi réussi", "submit_update": "Existe déjà dans {endpoint_name}" @@ -1212,11 +1252,54 @@ "stats": { "image_size": "Poids des images", "scenes_duration": "Durée des scènes", - "scenes_size": "Poids des scènes" + "scenes_played": "Scènes visionnées", + "scenes_size": "Poids des scènes", + "total_o_count": "Nombre total de O-", + "total_play_count": "Nombre de visionnage total", + "total_play_duration": "Durée de visionnage totale" }, "status": "Statut : {statusText}", "studio": "Studio", + "studio_and_parent": "Studio & Parent", "studio_depth": "Niveaux (vides pour tous)", + "studio_tagger": { + "add_new_studios": "Ajouter des nouveaux studios", + "any_names_entered_will_be_queried": "Tous les noms saisis seront interrogés depuis l'instance Stash-Box distante et ajoutés si trouvés. 
Seules les correspondances exactes seront considérées comme telles.", + "batch_add_studios": "Ajouter des studios par lots", + "batch_update_studios": "Mise à jour des studios par lots", + "config": { + "active_stash-box_instance": "Instance active Stash-Box :", + "create_parent_desc": "Créer les studios parents manquants, ou étiqueter et mettre à jour les données/image pour les studios parents existants avec des correspondances de noms exactes", + "create_parent_label": "Créer les studios parents", + "edit_excluded_fields": "Éditer les champs exclus", + "excluded_fields": "Champs exclus :", + "no_fields_are_excluded": "Aucun champ n'est exclu", + "no_instances_found": "Aucunes instances trouvées", + "these_fields_will_not_be_changed_when_updating_studios": "Ces champs ne seront pas modifiés lors de la mise à jour des studios." + }, + "create_or_tag_parent_studios": "Créer les studios parents manquants ou étiqueter les existants", + "current_page": "Page actuelle", + "failed_to_save_studio": "Enregistrement du studio \"{studio}\" échoué", + "name_already_exists": "Nom déjà existant", + "network_error": "Erreur réseau", + "no_results_found": "Aucuns résultats trouvés.", + "number_of_studios_will_be_processed": "{studio_count} studios seront traités", + "query_all_studios_in_the_database": "Tous les studios de la base de données", + "refresh_tagged_studios": "Rafraîchir les studios étiquetés", + "refreshing_will_update_the_data": "Un rafraîchissement mettra à jour les données de tous les studios étiquetés depuis l'instance stash-box.", + "status_tagging_job_queued": "Statut : Étiquetage en file d'attente", + "status_tagging_studios": "Statut : Étiquetage des studios", + "studio_already_tagged": "Studio déjà étiqueté", + "studio_names_separated_by_comma": "Noms de studio séparés par une virgule", + "studio_selection": "Sélection de studio", + "studio_successfully_tagged": "Studio étiqueté avec succès", + "tag_status": "Statut de l'étiquette", + 
"to_use_the_studio_tagger": "Pour utiliser l’étiqueteur de studio, une instance stash-box doit être configurée.", + "untagged_studios": "Studios non étiquetés", + "update_studio": "Mise à jour du studio", + "update_studios": "Actualiser les studios", + "updating_untagged_studios_description": "L'actualisation des studios non étiquetés tentera de faire correspondre chaques studios qui n'ont pas de StashID et mettre à jour les métadonnées." + }, "studios": "Studios", "sub_tag_count": "Nombre d'étiquettes affiliées", "sub_tag_of": "Étiquette affiliée de {parent}", @@ -1253,11 +1336,14 @@ "type": "Type", "updated_at": "Actualisé le", "url": "URL", + "urls": "URLs", "validation": { "aliases_must_be_unique": "Les alias doivent être uniques", "date_invalid_form": "${path} doit être au format AAAA-MM-JJ", - "required": "${path} est un champ requis" + "required": "${path} est un champ requis", + "urls_must_be_unique": "Les URLs doivent être uniques" }, + "video_codec": "Codec vidéo", "videos": "Vidéos", "view_all": "Tout voir", "weight": "Poids", diff --git a/ui/v2.5/src/locales/hu-HU.json b/ui/v2.5/src/locales/hu-HU.json index 7fff63874ee..59bfe7e769f 100644 --- a/ui/v2.5/src/locales/hu-HU.json +++ b/ui/v2.5/src/locales/hu-HU.json @@ -437,7 +437,7 @@ "parent_tags": "Szülő-címkék", "path": "Elérési Út", "performer": "Szereplő", - "performerTags": "Szereplő Címkék", + "performer_tags": "Szereplő Címkék", "performer_age": "Szereplő Kora", "performer_count": "Szereplők Száma", "performer_favorite": "Szereplő Kedvencek Közt", @@ -460,7 +460,7 @@ "resolution": "Felbontás", "scene": "Jelenet", "sceneTagger": "Jelenetcímkéző", - "sceneTags": "Jelenetcímkék", + "scene_tags": "Jelenetcímkék", "scene_count": "Jelenetszám", "scene_id": "Jelenet ID", "scenes": "Jelenetek", diff --git a/ui/v2.5/src/locales/it-IT.json b/ui/v2.5/src/locales/it-IT.json index 6f75b5b2e9b..9b51beb61ef 100644 --- a/ui/v2.5/src/locales/it-IT.json +++ b/ui/v2.5/src/locales/it-IT.json @@ -986,7 +986,7 @@ 
"resume_time": "Tempo Continuazione", "scene": "Scena", "sceneTagger": "Tagger Scena", - "sceneTags": "Tag Scena", + "scene_tags": "Tag Scena", "scene_code": "Codice dello Studio", "scene_count": "Numero Scene", "scene_created_at": "Scena Creata Al", diff --git a/ui/v2.5/src/locales/ja-JP.json b/ui/v2.5/src/locales/ja-JP.json index 27c74a3d84c..4dd4857f9d4 100644 --- a/ui/v2.5/src/locales/ja-JP.json +++ b/ui/v2.5/src/locales/ja-JP.json @@ -940,7 +940,7 @@ "path": "パス", "perceptual_similarity": "知覚的類似性 (phash)", "performer": "出演者", - "performerTags": "出演者タグ", + "performer_tags": "出演者タグ", "performer_age": "出演者の年齢", "performer_count": "出演者数", "performer_favorite": "出演者をお気に入り済み", @@ -995,7 +995,7 @@ "resume_time": "レジューム時間", "scene": "シーン", "sceneTagger": "シーン一括タグ付け", - "sceneTags": "シーンタグ", + "scene_tags": "シーンタグ", "scene_code": "スタジオコード", "scene_count": "シーン数", "scene_created_at": "シーンの作成日時", diff --git a/ui/v2.5/src/locales/ko-KR.json b/ui/v2.5/src/locales/ko-KR.json index a21a568d1cb..b1ba398159f 100644 --- a/ui/v2.5/src/locales/ko-KR.json +++ b/ui/v2.5/src/locales/ko-KR.json @@ -1022,7 +1022,7 @@ "penis_length_cm": "자지 크기 (cm)", "perceptual_similarity": "유사도 (phash)", "performer": "배우", - "performerTags": "배우 태그", + "performer_tags": "배우 태그", "performer_age": "배우 나이", "performer_count": "배우 수", "performer_favorite": "즐겨찾기한 배우", @@ -1077,7 +1077,7 @@ "resume_time": "재시작 시간", "scene": "영상", "sceneTagger": "영상 태거", - "sceneTags": "영상 태그", + "scene_tags": "영상 태그", "scene_code": "스튜디오 코드", "scene_count": "영상 개수", "scene_created_at": "영상 생성 날짜", diff --git a/ui/v2.5/src/locales/nl-NL.json b/ui/v2.5/src/locales/nl-NL.json index f2c708e5d79..c768a40960a 100644 --- a/ui/v2.5/src/locales/nl-NL.json +++ b/ui/v2.5/src/locales/nl-NL.json @@ -821,7 +821,7 @@ "path": "Pad", "perceptual_similarity": "Perceptuele gelijkenis (phash)", "performer": "Performer", - "performerTags": "Peformer Labels", + "performer_tags": "Peformer Labels", "performer_age": "Leeftijd artiest", 
"performer_count": "Performer Aantal", "performer_favorite": "Artiest favoriet", @@ -855,7 +855,7 @@ "resolution": "Resolutie", "scene": "Scène", "sceneTagger": "Scene Labelen", - "sceneTags": "Scene Labels", + "scene_tags": "Scene Labels", "scene_count": "Scene Aantal", "scene_id": "Scene ID", "scenes": "Scènes", diff --git a/ui/v2.5/src/locales/pl-PL.json b/ui/v2.5/src/locales/pl-PL.json index 1c98751faf1..ec6ef87181a 100644 --- a/ui/v2.5/src/locales/pl-PL.json +++ b/ui/v2.5/src/locales/pl-PL.json @@ -1030,7 +1030,7 @@ "penis_length_cm": "Długość penisa (cm)", "perceptual_similarity": "Podobieństwo percepcyjne (phash)", "performer": "Aktor", - "performerTags": "Tagi aktorów", + "performer_tags": "Tagi aktorów", "performer_age": "Wiek aktora", "performer_count": "Liczba aktorów", "performer_favorite": "Ulubiony aktor", @@ -1085,7 +1085,7 @@ "resume_time": "Rozpocznij od", "scene": "Scena", "sceneTagger": "Otagowywacz scen", - "sceneTags": "Tagi sceny", + "scene_tags": "Tagi sceny", "scene_code": "Kod studia", "scene_count": "Liczba scen", "scene_created_at": "Scena utworzona", diff --git a/ui/v2.5/src/locales/pt-BR.json b/ui/v2.5/src/locales/pt-BR.json index be494d6ee46..fec188b00ef 100644 --- a/ui/v2.5/src/locales/pt-BR.json +++ b/ui/v2.5/src/locales/pt-BR.json @@ -847,7 +847,7 @@ "path": "Caminho", "perceptual_similarity": "Semelhança Perceptiva (phash)", "performer": "Artista", - "performerTags": "Etiquetas de artistas", + "performer_tags": "Etiquetas de artistas", "performer_age": "Idade do Artista", "performer_count": "Contagem de artistas", "performer_favorite": "Artista Favoritado", @@ -898,7 +898,7 @@ "resolution": "Resolução", "scene": "Cena", "sceneTagger": "Etiquetador de cena", - "sceneTags": "Etiquetas da cena", + "scene_tags": "Etiquetas da cena", "scene_count": "Contagem de cena", "scene_id": "Cena ID", "scenes": "Cenas", diff --git a/ui/v2.5/src/locales/ru-RU.json b/ui/v2.5/src/locales/ru-RU.json index 650b6b1b5e2..14fa420fea3 100644 --- 
a/ui/v2.5/src/locales/ru-RU.json +++ b/ui/v2.5/src/locales/ru-RU.json @@ -933,7 +933,7 @@ "path": "Путь", "perceptual_similarity": "Воспринимаемое сходство (phash)", "performer": "Актер", - "performerTags": "Теги актера", + "performer_tags": "Теги актера", "performer_age": "Возраст актера", "performer_count": "Количество актеров", "performer_favorite": "Участник добавлен в избранное", @@ -988,7 +988,7 @@ "resume_time": "Таймкод воспроизведения", "scene": "Сцена", "sceneTagger": "Пометка сцен тэгами", - "sceneTags": "Тэги сцен", + "scene_tags": "Тэги сцен", "scene_code": "Идентификатор сцены", "scene_count": "Количество сцен", "scene_created_at": "Сцена создана", diff --git a/ui/v2.5/src/locales/sv-SE.json b/ui/v2.5/src/locales/sv-SE.json index 8870bfed6c7..50bbc68b84b 100644 --- a/ui/v2.5/src/locales/sv-SE.json +++ b/ui/v2.5/src/locales/sv-SE.json @@ -1033,7 +1033,7 @@ "penis_length_cm": "Penislängd (cm)", "perceptual_similarity": "Perceptuell likhet (phash)", "performer": "Stjärna", - "performerTags": "Stjärntagg", + "performer_tags": "Stjärntagg", "performer_age": "Ålder på stjärna", "performer_count": "Antal stjärnor", "performer_favorite": "Favoritiserad stjärna", @@ -1088,7 +1088,7 @@ "resume_time": "Återupptagningstid", "scene": "Scen", "sceneTagger": "Scentaggaren", - "sceneTags": "Scentaggar", + "scene_tags": "Scentaggar", "scene_code": "Studiokod", "scene_count": "Antal scener", "scene_created_at": "Scenen Skapad", diff --git a/ui/v2.5/src/locales/tr-TR.json b/ui/v2.5/src/locales/tr-TR.json index 476b9fe17c9..bf14087af48 100644 --- a/ui/v2.5/src/locales/tr-TR.json +++ b/ui/v2.5/src/locales/tr-TR.json @@ -747,7 +747,7 @@ "part_of": "{parent} öğesinin parçası", "path": "Konum", "performer": "Oyuncu", - "performerTags": "Oyuncu Etiketleri", + "performer_tags": "Oyuncu Etiketleri", "performer_count": "Oyuncu Sayısı", "performer_image": "Oyuncu Resmi", "performers": "Oyuncular", @@ -758,7 +758,7 @@ "resolution": "Çözünürlük", "scene": "Sahne", "sceneTagger": 
"Sahne Etiketleyici", - "sceneTags": "Sahne Etiketleri", + "scene_tags": "Sahne Etiketleri", "scene_count": "Sahne Sayısı", "scene_id": "Sahne Kimliği (ID)", "scenes": "Sahneler", diff --git a/ui/v2.5/src/locales/zh-CN.json b/ui/v2.5/src/locales/zh-CN.json index 5e10459bc9e..f2c61f9492c 100644 --- a/ui/v2.5/src/locales/zh-CN.json +++ b/ui/v2.5/src/locales/zh-CN.json @@ -1018,7 +1018,7 @@ "path": "路径", "perceptual_similarity": "感知的类似程度(感知码)", "performer": "演员", - "performerTags": "演员标签", + "performer_tags": "演员标签", "performer_age": "演员年龄", "performer_count": "演员数量", "performer_favorite": "演员已收藏", @@ -1073,7 +1073,7 @@ "resume_time": "恢复时间", "scene": "短片", "sceneTagger": "短片标记器", - "sceneTags": "短片标记", + "scene_tags": "短片标记", "scene_code": "工作室代码", "scene_count": "短片数量", "scene_created_at": "短片建立在", diff --git a/ui/v2.5/src/locales/zh-TW.json b/ui/v2.5/src/locales/zh-TW.json index e8685a94fc7..ee446d32a38 100644 --- a/ui/v2.5/src/locales/zh-TW.json +++ b/ui/v2.5/src/locales/zh-TW.json @@ -956,7 +956,7 @@ "path": "路徑", "perceptual_similarity": "感知相似度 (PHash)", "performer": "演員", - "performerTags": "演員標籤", + "performer_tags": "演員標籤", "performer_age": "演員年齡", "performer_count": "演員數量", "performer_favorite": "已收藏的演員", @@ -1011,7 +1011,7 @@ "resume_time": "恢復播放時間", "scene": "短片", "sceneTagger": "短片標籤器", - "sceneTags": "短片標籤", + "scene_tags": "短片標籤", "scene_code": "番號", "scene_count": "短片數量", "scene_created_at": "短片建立於", diff --git a/ui/v2.5/src/models/list-filter/criteria/captions.ts b/ui/v2.5/src/models/list-filter/criteria/captions.ts index 13c72dc7756..2d0fbdb1d4f 100644 --- a/ui/v2.5/src/models/list-filter/criteria/captions.ts +++ b/ui/v2.5/src/models/list-filter/criteria/captions.ts @@ -10,7 +10,6 @@ class CaptionsCriterionOptionType extends CriterionOption { super({ messageID: value, type: value, - parameterName: value, modifierOptions: [ CriterionModifier.Includes, CriterionModifier.Excludes, @@ -19,6 +18,7 @@ class CaptionsCriterionOptionType extends 
CriterionOption { ], defaultModifier: CriterionModifier.Includes, options: languageStrings, + makeCriterion: () => new CaptionCriterion(), }); } } diff --git a/ui/v2.5/src/models/list-filter/criteria/circumcised.ts b/ui/v2.5/src/models/list-filter/criteria/circumcised.ts index c18aa1b017f..dd8fbfbb171 100644 --- a/ui/v2.5/src/models/list-filter/criteria/circumcised.ts +++ b/ui/v2.5/src/models/list-filter/criteria/circumcised.ts @@ -16,6 +16,7 @@ export const CircumcisedCriterionOption = new CriterionOption({ CriterionModifier.IsNull, CriterionModifier.NotNull, ], + makeCriterion: () => new CircumcisedCriterion(), }); export class CircumcisedCriterion extends MultiStringCriterion { diff --git a/ui/v2.5/src/models/list-filter/criteria/country.ts b/ui/v2.5/src/models/list-filter/criteria/country.ts index 9c70b621d78..2fa24790022 100644 --- a/ui/v2.5/src/models/list-filter/criteria/country.ts +++ b/ui/v2.5/src/models/list-filter/criteria/country.ts @@ -3,11 +3,7 @@ import { CriterionModifier } from "src/core/generated-graphql"; import { getCountryByISO } from "src/utils/country"; import { StringCriterion, StringCriterionOption } from "./criterion"; -const countryCriterionOption = new StringCriterionOption( - "country", - "country", - "country" -); +const countryCriterionOption = new StringCriterionOption("country", "country"); export class CountryCriterion extends StringCriterion { constructor() { diff --git a/ui/v2.5/src/models/list-filter/criteria/criterion.ts b/ui/v2.5/src/models/list-filter/criteria/criterion.ts index acc6b3ff16a..feee489b981 100644 --- a/ui/v2.5/src/models/list-filter/criteria/criterion.ts +++ b/ui/v2.5/src/models/list-filter/criteria/criterion.ts @@ -121,7 +121,7 @@ export abstract class Criterion { } public getId(): string { - return `${this.criterionOption.parameterName}-${this.modifier.toString()}`; // TODO add values? + return `${this.criterionOption.type}-${this.modifier.toString()}`; // TODO add values? 
} public toJSON() { @@ -154,7 +154,7 @@ export abstract class Criterion { // eslint-disable-next-line @typescript-eslint/no-explicit-any public apply(outputFilter: Record) { // eslint-disable-next-line no-param-reassign - outputFilter[this.criterionOption.parameterName] = this.toCriterionInput(); + outputFilter[this.criterionOption.type] = this.toCriterionInput(); } // eslint-disable-next-line @typescript-eslint/no-explicit-any @@ -164,50 +164,68 @@ export abstract class Criterion { modifier: this.modifier, }; } + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + public toSavedFilter(outputFilter: Record) { + outputFilter[this.criterionOption.type] = { + value: this.value, + modifier: this.modifier, + }; + } } -export type InputType = "number" | "text" | undefined; +export type InputType = + | "number" + | "text" + | "performers" + | "studios" + | "tags" + | "performer_tags" + | "scene_tags" + | "movies" + | "galleries" + | undefined; interface ICriterionOptionsParams { messageID: string; type: CriterionType; inputType?: InputType; - parameterName?: string; modifierOptions?: CriterionModifier[]; defaultModifier?: CriterionModifier; options?: Option[]; + makeCriterion: () => Criterion; } export class CriterionOption { public readonly messageID: string; public readonly type: CriterionType; - public readonly parameterName: string; public readonly modifierOptions: CriterionModifier[]; public readonly defaultModifier: CriterionModifier; public readonly options: Option[] | undefined; public readonly inputType: InputType; + public readonly makeCriterionFn: ( + o: CriterionOption + ) => Criterion; constructor(options: ICriterionOptionsParams) { this.messageID = options.messageID; this.type = options.type; - this.parameterName = options.parameterName ?? options.type; this.modifierOptions = options.modifierOptions ?? []; this.defaultModifier = options.defaultModifier ?? 
CriterionModifier.Equals; this.options = options.options; this.inputType = options.inputType; + this.makeCriterionFn = options.makeCriterion; + } + + public makeCriterion() { + return this.makeCriterionFn(this); } } export class StringCriterionOption extends CriterionOption { - constructor( - messageID: string, - value: CriterionType, - parameterName?: string, - options?: Option[] - ) { + constructor(messageID: string, type: CriterionType, options?: Option[]) { super({ messageID, - type: value, - parameterName, + type, modifierOptions: [ CriterionModifier.Equals, CriterionModifier.NotEquals, @@ -221,20 +239,16 @@ export class StringCriterionOption extends CriterionOption { defaultModifier: CriterionModifier.Equals, options, inputType: "text", + makeCriterion: () => new StringCriterion(this), }); } } export function createStringCriterionOption( - value: CriterionType, - messageID?: string, - parameterName?: string + type: CriterionType, + messageID?: string ) { - return new StringCriterionOption( - messageID ?? value, - value, - parameterName ?? messageID ?? value - ); + return new StringCriterionOption(messageID ?? 
type, type); } export class StringCriterion extends Criterion { @@ -274,16 +288,10 @@ export class MultiStringCriterion extends Criterion { } export class MandatoryStringCriterionOption extends CriterionOption { - constructor( - messageID: string, - value: CriterionType, - parameterName?: string, - options?: Option[] - ) { + constructor(messageID: string, value: CriterionType, options?: Option[]) { super({ messageID, type: value, - parameterName, modifierOptions: [ CriterionModifier.Equals, CriterionModifier.NotEquals, @@ -295,45 +303,42 @@ export class MandatoryStringCriterionOption extends CriterionOption { defaultModifier: CriterionModifier.Equals, options, inputType: "text", + makeCriterion: () => new StringCriterion(this), }); } } export function createMandatoryStringCriterionOption( value: CriterionType, - messageID?: string, - parameterName?: string + messageID?: string ) { - return new MandatoryStringCriterionOption( - messageID ?? value, - value, - parameterName ?? messageID ?? value - ); + return new MandatoryStringCriterionOption(messageID ?? value, value); } export class PathCriterionOption extends StringCriterionOption {} export function createPathCriterionOption( - value: CriterionType, - messageID?: string, - parameterName?: string + type: CriterionType, + messageID?: string ) { - return new PathCriterionOption( - messageID ?? value, - value, - parameterName ?? messageID ?? value - ); + return new PathCriterionOption(messageID ?? type, type); } export class BooleanCriterionOption extends CriterionOption { - constructor(messageID: string, value: CriterionType, parameterName?: string) { + constructor( + messageID: string, + value: CriterionType, + makeCriterion?: () => Criterion + ) { super({ messageID, type: value, - parameterName, modifierOptions: [], defaultModifier: CriterionModifier.Equals, options: [true.toString(), false.toString()], + makeCriterion: makeCriterion + ? 
makeCriterion + : () => new BooleanCriterion(this), }); } } @@ -350,27 +355,16 @@ export class BooleanCriterion extends StringCriterion { export function createBooleanCriterionOption( value: CriterionType, - messageID?: string, - parameterName?: string + messageID?: string ) { - return new BooleanCriterionOption( - messageID ?? value, - value, - parameterName ?? messageID ?? value - ); + return new BooleanCriterionOption(messageID ?? value, value); } export class NumberCriterionOption extends CriterionOption { - constructor( - messageID: string, - value: CriterionType, - parameterName?: string, - options?: Option[] - ) { + constructor(messageID: string, value: CriterionType, options?: Option[]) { super({ messageID, type: value, - parameterName, modifierOptions: [ CriterionModifier.Equals, CriterionModifier.NotEquals, @@ -384,16 +378,16 @@ export class NumberCriterionOption extends CriterionOption { defaultModifier: CriterionModifier.Equals, options, inputType: "number", + makeCriterion: () => new NumberCriterion(this), }); } } export class NullNumberCriterionOption extends CriterionOption { - constructor(messageID: string, value: CriterionType, parameterName?: string) { + constructor(messageID: string, value: CriterionType) { super({ messageID, type: value, - parameterName, modifierOptions: [ CriterionModifier.Equals, CriterionModifier.NotEquals, @@ -406,16 +400,17 @@ export class NullNumberCriterionOption extends CriterionOption { ], defaultModifier: CriterionModifier.Equals, inputType: "number", + makeCriterion: () => new NumberCriterion(this), }); } } export function createNumberCriterionOption(value: CriterionType) { - return new NumberCriterionOption(value, value, value); + return new NumberCriterionOption(value, value); } export function createNullNumberCriterionOption(value: CriterionType) { - return new NullNumberCriterionOption(value, value, value); + return new NullNumberCriterionOption(value, value); } export class NumberCriterion extends Criterion { @@ 
-437,8 +432,8 @@ export class NumberCriterion extends Criterion { protected toCriterionInput(): IntCriterionInput { return { modifier: this.modifier, - value: this.value.value ?? 0, - value2: this.value.value2, + value: this.value?.value ?? 0, + value2: this.value?.value2, }; } @@ -487,8 +482,8 @@ export class ILabeledIdCriterionOption extends CriterionOption { constructor( messageID: string, value: CriterionType, - parameterName: string, - includeAll: boolean + includeAll: boolean, + inputType: InputType ) { const modifierOptions = [ CriterionModifier.Includes, @@ -506,9 +501,10 @@ export class ILabeledIdCriterionOption extends CriterionOption { super({ messageID, type: value, - parameterName, modifierOptions, defaultModifier, + makeCriterion: () => new ILabeledIdCriterion(this), + inputType, }); } } @@ -684,11 +680,10 @@ export class IHierarchicalLabeledIdCriterion extends Criterion new NumberCriterion(this), }); } } @@ -707,7 +703,7 @@ export function createMandatoryNumberCriterionOption( value: CriterionType, messageID?: string ) { - return new MandatoryNumberCriterionOption(messageID ?? value, value, value); + return new MandatoryNumberCriterionOption(messageID ?? value, value); } export class DurationCriterion extends Criterion { @@ -718,8 +714,8 @@ export class DurationCriterion extends Criterion { protected toCriterionInput(): IntCriterionInput { return { modifier: this.modifier, - value: this.value.value ?? 0, - value2: this.value.value2, + value: this.value?.value ?? 
0, + value2: this.value?.value2, }; } @@ -771,16 +767,10 @@ export class PhashDuplicateCriterion extends StringCriterion { } export class DateCriterionOption extends CriterionOption { - constructor( - messageID: string, - value: CriterionType, - parameterName?: string, - options?: Option[] - ) { + constructor(messageID: string, value: CriterionType, options?: Option[]) { super({ messageID, type: value, - parameterName, modifierOptions: [ CriterionModifier.Equals, CriterionModifier.NotEquals, @@ -794,12 +784,13 @@ export class DateCriterionOption extends CriterionOption { defaultModifier: CriterionModifier.Equals, options, inputType: "text", + makeCriterion: () => new DateCriterion(this), }); } } export function createDateCriterionOption(value: CriterionType) { - return new DateCriterionOption(value, value, value); + return new DateCriterionOption(value, value); } export class DateCriterion extends Criterion { @@ -813,8 +804,8 @@ export class DateCriterion extends Criterion { protected toCriterionInput(): DateCriterionInput { return { modifier: this.modifier, - value: this.value.value, - value2: this.value.value2, + value: this.value?.value, + value2: this.value?.value2, }; } @@ -856,16 +847,10 @@ export class DateCriterion extends Criterion { } export class TimestampCriterionOption extends CriterionOption { - constructor( - messageID: string, - value: CriterionType, - parameterName?: string, - options?: Option[] - ) { + constructor(messageID: string, value: CriterionType, options?: Option[]) { super({ messageID, type: value, - parameterName, modifierOptions: [ CriterionModifier.GreaterThan, CriterionModifier.LessThan, @@ -877,19 +862,20 @@ export class TimestampCriterionOption extends CriterionOption { defaultModifier: CriterionModifier.GreaterThan, options, inputType: "text", + makeCriterion: () => new TimestampCriterion(this), }); } } export function createTimestampCriterionOption(value: CriterionType) { - return new TimestampCriterionOption(value, value, value); 
+ return new TimestampCriterionOption(value, value); } export class TimestampCriterion extends Criterion { public encodeValue() { return { - value: this.value.value, - value2: this.value.value2, + value: this.value?.value, + value2: this.value?.value2, }; } @@ -950,16 +936,10 @@ export class TimestampCriterion extends Criterion { } export class MandatoryTimestampCriterionOption extends CriterionOption { - constructor( - messageID: string, - value: CriterionType, - parameterName?: string, - options?: Option[] - ) { + constructor(messageID: string, value: CriterionType, options?: Option[]) { super({ messageID, type: value, - parameterName, modifierOptions: [ CriterionModifier.GreaterThan, CriterionModifier.LessThan, @@ -969,10 +949,11 @@ export class MandatoryTimestampCriterionOption extends CriterionOption { defaultModifier: CriterionModifier.GreaterThan, options, inputType: "text", + makeCriterion: () => new TimestampCriterion(this), }); } } export function createMandatoryTimestampCriterionOption(value: CriterionType) { - return new MandatoryTimestampCriterionOption(value, value, value); + return new MandatoryTimestampCriterionOption(value, value); } diff --git a/ui/v2.5/src/models/list-filter/criteria/factory.ts b/ui/v2.5/src/models/list-filter/criteria/factory.ts index d45042aa0ee..1e20451d577 100644 --- a/ui/v2.5/src/models/list-filter/criteria/factory.ts +++ b/ui/v2.5/src/models/list-filter/criteria/factory.ts @@ -1,250 +1,36 @@ -/* eslint-disable consistent-return, default-case */ -import { - StringCriterion, - NumberCriterion, - DurationCriterion, - NumberCriterionOption, - MandatoryStringCriterionOption, - NullNumberCriterionOption, - MandatoryNumberCriterionOption, - StringCriterionOption, - ILabeledIdCriterion, - BooleanCriterion, - BooleanCriterionOption, - DateCriterion, - DateCriterionOption, - TimestampCriterion, - MandatoryTimestampCriterionOption, - PathCriterionOption, -} from "./criterion"; -import { OrganizedCriterion } from "./organized"; -import 
{ FavoriteCriterion, PerformerFavoriteCriterion } from "./favorite"; -import { HasMarkersCriterion } from "./has-markers"; -import { HasChaptersCriterion } from "./has-chapters"; -import { - PerformerIsMissingCriterionOption, - ImageIsMissingCriterionOption, - TagIsMissingCriterionOption, - SceneIsMissingCriterionOption, - IsMissingCriterion, - GalleryIsMissingCriterionOption, - StudioIsMissingCriterionOption, - MovieIsMissingCriterionOption, -} from "./is-missing"; -import { NoneCriterion } from "./none"; -import { PerformersCriterion } from "./performers"; -import { AverageResolutionCriterion, ResolutionCriterion } from "./resolution"; -import { StudiosCriterion, ParentStudiosCriterion } from "./studios"; -import { - ChildTagsCriterionOption, - ParentTagsCriterionOption, - PerformerTagsCriterionOption, - SceneTagsCriterionOption, - TagsCriterion, - TagsCriterionOption, -} from "./tags"; -import { GenderCriterion } from "./gender"; -import { CircumcisedCriterion } from "./circumcised"; -import { MoviesCriterionOption } from "./movies"; -import { GalleriesCriterion } from "./galleries"; -import { CriterionType } from "../types"; -import { InteractiveCriterion } from "./interactive"; -import { DuplicatedCriterion, PhashCriterion } from "./phash"; -import { CaptionCriterion } from "./captions"; -import { RatingCriterion } from "./rating"; -import { CountryCriterion } from "./country"; -import { StashIDCriterion } from "./stash-ids"; import * as GQL from "src/core/generated-graphql"; -import { IUIConfig } from "src/core/config"; -import { defaultRatingSystemOptions } from "src/utils/rating"; +import { SceneListFilterOptions } from "../scenes"; +import { MovieListFilterOptions } from "../movies"; +import { GalleryListFilterOptions } from "../galleries"; +import { PerformerListFilterOptions } from "../performers"; +import { ImageListFilterOptions } from "../images"; +import { SceneMarkerListFilterOptions } from "../scene-markers"; +import { StudioListFilterOptions } 
from "../studios"; +import { TagListFilterOptions } from "../tags"; +import { CriterionType } from "../types"; + +const filterModeOptions = { + [GQL.FilterMode.Galleries]: GalleryListFilterOptions.criterionOptions, + [GQL.FilterMode.Images]: ImageListFilterOptions.criterionOptions, + [GQL.FilterMode.Movies]: MovieListFilterOptions.criterionOptions, + [GQL.FilterMode.Performers]: PerformerListFilterOptions.criterionOptions, + [GQL.FilterMode.SceneMarkers]: SceneMarkerListFilterOptions.criterionOptions, + [GQL.FilterMode.Scenes]: SceneListFilterOptions.criterionOptions, + [GQL.FilterMode.Studios]: StudioListFilterOptions.criterionOptions, + [GQL.FilterMode.Tags]: TagListFilterOptions.criterionOptions, +}; export function makeCriteria( - config: GQL.ConfigDataFragment | undefined, + mode: GQL.FilterMode, type: CriterionType = "none" ) { - switch (type) { - case "none": - return new NoneCriterion(); - case "name": - return new StringCriterion( - new MandatoryStringCriterionOption(type, type) - ); - case "path": - return new StringCriterion(new PathCriterionOption(type, type)); - case "checksum": - return new StringCriterion( - new MandatoryStringCriterionOption("media_info.checksum", type, type) - ); - case "oshash": - return new StringCriterion( - new MandatoryStringCriterionOption("media_info.hash", type, type) - ); - case "organized": - return new OrganizedCriterion(); - case "o_counter": - case "interactive_speed": - case "scene_count": - case "marker_count": - case "image_count": - case "gallery_count": - case "performer_count": - case "performer_age": - case "tag_count": - case "file_count": - case "play_count": - return new NumberCriterion( - new MandatoryNumberCriterionOption(type, type) - ); - case "rating": - return new NumberCriterion(new NullNumberCriterionOption(type, type)); - case "rating100": - return new RatingCriterion( - new NullNumberCriterionOption("rating", type), - (config?.ui as IUIConfig)?.ratingSystemOptions ?? 
- defaultRatingSystemOptions - ); - case "resolution": - return new ResolutionCriterion(); - case "average_resolution": - return new AverageResolutionCriterion(); - case "video_codec": - return new StringCriterion(new StringCriterionOption(type, type)); - case "audio_codec": - return new StringCriterion(new StringCriterionOption(type, type)); - case "resume_time": - case "duration": - case "play_duration": - return new DurationCriterion( - new MandatoryNumberCriterionOption(type, type) - ); - case "favorite": - return new FavoriteCriterion(); - case "hasMarkers": - return new HasMarkersCriterion(); - case "hasChapters": - return new HasChaptersCriterion(); - case "sceneIsMissing": - return new IsMissingCriterion(SceneIsMissingCriterionOption); - case "imageIsMissing": - return new IsMissingCriterion(ImageIsMissingCriterionOption); - case "performerIsMissing": - return new IsMissingCriterion(PerformerIsMissingCriterionOption); - case "galleryIsMissing": - return new IsMissingCriterion(GalleryIsMissingCriterionOption); - case "tagIsMissing": - return new IsMissingCriterion(TagIsMissingCriterionOption); - case "studioIsMissing": - return new IsMissingCriterion(StudioIsMissingCriterionOption); - case "movieIsMissing": - return new IsMissingCriterion(MovieIsMissingCriterionOption); - case "tags": - return new TagsCriterion(TagsCriterionOption); - case "sceneTags": - return new TagsCriterion(SceneTagsCriterionOption); - case "performerTags": - return new TagsCriterion(PerformerTagsCriterionOption); - case "parentTags": - return new TagsCriterion(ParentTagsCriterionOption); - case "childTags": - return new TagsCriterion(ChildTagsCriterionOption); - case "performers": - return new PerformersCriterion(); - case "performer_favorite": - return new PerformerFavoriteCriterion(); - case "studios": - return new StudiosCriterion(); - case "parent_studios": - return new ParentStudiosCriterion(); - case "movies": - return new ILabeledIdCriterion(MoviesCriterionOption); - case 
"galleries": - return new GalleriesCriterion(); - case "birth_year": - case "death_year": - case "weight": - return new NumberCriterion(new NumberCriterionOption(type, type)); - case "penis_length": - return new NumberCriterion(new NumberCriterionOption(type, type)); - case "age": - return new NumberCriterion( - new MandatoryNumberCriterionOption(type, type) - ); - case "gender": - return new GenderCriterion(); - case "circumcised": - return new CircumcisedCriterion(); - case "sceneChecksum": - case "galleryChecksum": - return new StringCriterion( - new StringCriterionOption("media_info.checksum", type, "checksum") - ); - case "phash": - return new PhashCriterion(); - case "duplicated": - return new DuplicatedCriterion(); - case "country": - return new CountryCriterion(); - case "height": - case "height_cm": - return new NumberCriterion( - new NumberCriterionOption("height", "height_cm", type) - ); - // stash_id is deprecated - case "stash_id": - case "stash_id_endpoint": - return new StashIDCriterion(); - case "ethnicity": - case "hair_color": - case "eye_color": - case "measurements": - case "fake_tits": - case "career_length": - case "tattoos": - case "piercings": - case "aliases": - case "url": - case "details": - case "title": - case "director": - case "synopsis": - case "description": - case "disambiguation": - return new StringCriterion(new StringCriterionOption(type, type)); - case "scene_code": - return new StringCriterion(new StringCriterionOption(type, type, "code")); - case "interactive": - return new InteractiveCriterion(); - case "captions": - return new CaptionCriterion(); - case "parent_tag_count": - return new NumberCriterion( - new MandatoryNumberCriterionOption( - "parent_tag_count", - "parent_tag_count", - "parent_count" - ) - ); - case "child_tag_count": - return new NumberCriterion( - new MandatoryNumberCriterionOption( - "sub_tag_count", - "child_tag_count", - "child_count" - ) - ); - case "ignore_auto_tag": - return new BooleanCriterion(new 
BooleanCriterionOption(type, type)); - case "date": - case "birthdate": - case "death_date": - case "scene_date": - return new DateCriterion(new DateCriterionOption(type, type)); - case "created_at": - case "updated_at": - case "scene_created_at": - case "scene_updated_at": - return new TimestampCriterion( - new MandatoryTimestampCriterionOption(type, type) - ); + const criterionOptions = filterModeOptions[mode]; + + const option = criterionOptions.find((o) => o.type === type); + + if (!option) { + throw new Error(`Unknown criterion parameter name: ${type}`); } + + return option?.makeCriterion(); } diff --git a/ui/v2.5/src/models/list-filter/criteria/favorite.ts b/ui/v2.5/src/models/list-filter/criteria/favorite.ts index 362ebab93fe..5479980c3cb 100644 --- a/ui/v2.5/src/models/list-filter/criteria/favorite.ts +++ b/ui/v2.5/src/models/list-filter/criteria/favorite.ts @@ -2,7 +2,6 @@ import { BooleanCriterion, BooleanCriterionOption } from "./criterion"; export const FavoriteCriterionOption = new BooleanCriterionOption( "favourite", - "favorite", "filter_favorites" ); @@ -13,7 +12,6 @@ export class FavoriteCriterion extends BooleanCriterion { } export const PerformerFavoriteCriterionOption = new BooleanCriterionOption( - "performer_favorite", "performer_favorite", "performer_favorite" ); diff --git a/ui/v2.5/src/models/list-filter/criteria/galleries.ts b/ui/v2.5/src/models/list-filter/criteria/galleries.ts index d2331cd3adc..60368853a2d 100644 --- a/ui/v2.5/src/models/list-filter/criteria/galleries.ts +++ b/ui/v2.5/src/models/list-filter/criteria/galleries.ts @@ -1,10 +1,12 @@ import { ILabeledIdCriterion, ILabeledIdCriterionOption } from "./criterion"; +const inputType = "galleries"; + const galleriesCriterionOption = new ILabeledIdCriterionOption( "galleries", "galleries", - "galleries", - true + true, + inputType ); export class GalleriesCriterion extends ILabeledIdCriterion { diff --git a/ui/v2.5/src/models/list-filter/criteria/gender.ts 
b/ui/v2.5/src/models/list-filter/criteria/gender.ts index 58f2da71210..801e922594b 100644 --- a/ui/v2.5/src/models/list-filter/criteria/gender.ts +++ b/ui/v2.5/src/models/list-filter/criteria/gender.ts @@ -6,6 +6,7 @@ export const GenderCriterionOption = new CriterionOption({ messageID: "gender", type: "gender", options: genderStrings, + makeCriterion: () => new GenderCriterion(), }); export class GenderCriterion extends StringCriterion { diff --git a/ui/v2.5/src/models/list-filter/criteria/has-chapters.ts b/ui/v2.5/src/models/list-filter/criteria/has-chapters.ts index 12d74cbbb07..8f38783f034 100644 --- a/ui/v2.5/src/models/list-filter/criteria/has-chapters.ts +++ b/ui/v2.5/src/models/list-filter/criteria/has-chapters.ts @@ -2,9 +2,9 @@ import { CriterionOption, StringCriterion } from "./criterion"; export const HasChaptersCriterionOption = new CriterionOption({ messageID: "hasChapters", - type: "hasChapters", - parameterName: "has_chapters", + type: "has_chapters", options: [true.toString(), false.toString()], + makeCriterion: () => new HasChaptersCriterion(), }); export class HasChaptersCriterion extends StringCriterion { diff --git a/ui/v2.5/src/models/list-filter/criteria/has-markers.ts b/ui/v2.5/src/models/list-filter/criteria/has-markers.ts index 3f4109ef175..23a72152ff5 100644 --- a/ui/v2.5/src/models/list-filter/criteria/has-markers.ts +++ b/ui/v2.5/src/models/list-filter/criteria/has-markers.ts @@ -2,9 +2,9 @@ import { CriterionOption, StringCriterion } from "./criterion"; export const HasMarkersCriterionOption = new CriterionOption({ messageID: "hasMarkers", - type: "hasMarkers", - parameterName: "has_markers", + type: "has_markers", options: [true.toString(), false.toString()], + makeCriterion: () => new HasMarkersCriterion(), }); export class HasMarkersCriterion extends StringCriterion { diff --git a/ui/v2.5/src/models/list-filter/criteria/is-missing.ts b/ui/v2.5/src/models/list-filter/criteria/is-missing.ts index e16e79793a1..acd96ad8c60 100644 --- 
a/ui/v2.5/src/models/list-filter/criteria/is-missing.ts +++ b/ui/v2.5/src/models/list-filter/criteria/is-missing.ts @@ -11,25 +11,19 @@ export class IsMissingCriterion extends StringCriterion { } class IsMissingCriterionOptionClass extends CriterionOption { - constructor( - messageID: string, - value: CriterionType, - parameterName: string, - options: Option[] - ) { + constructor(messageID: string, type: CriterionType, options: Option[]) { super({ messageID, - type: value, - parameterName, + type, options, defaultModifier: CriterionModifier.Equals, + makeCriterion: () => new IsMissingCriterion(this), }); } } export const SceneIsMissingCriterionOption = new IsMissingCriterionOptionClass( "isMissing", - "sceneIsMissing", "is_missing", [ "title", @@ -48,73 +42,59 @@ export const SceneIsMissingCriterionOption = new IsMissingCriterionOptionClass( export const ImageIsMissingCriterionOption = new IsMissingCriterionOptionClass( "isMissing", - "imageIsMissing", "is_missing", ["title", "galleries", "studio", "performers", "tags"] ); export const PerformerIsMissingCriterionOption = - new IsMissingCriterionOptionClass( - "isMissing", - "performerIsMissing", - "is_missing", - [ - "url", - "twitter", - "instagram", - "ethnicity", - "country", - "hair_color", - "eye_color", - "height", - "weight", - "measurements", - "fake_tits", - "career_length", - "tattoos", - "piercings", - "aliases", - "gender", - "image", - "details", - "stash_id", - ] - ); + new IsMissingCriterionOptionClass("isMissing", "is_missing", [ + "url", + "twitter", + "instagram", + "ethnicity", + "country", + "hair_color", + "eye_color", + "height", + "weight", + "measurements", + "fake_tits", + "career_length", + "tattoos", + "piercings", + "aliases", + "gender", + "image", + "details", + "stash_id", + ]); export const GalleryIsMissingCriterionOption = - new IsMissingCriterionOptionClass( - "isMissing", - "galleryIsMissing", - "is_missing", - [ - "title", - "details", - "url", - "date", - "studio", - 
"performers", - "tags", - "scenes", - ] - ); + new IsMissingCriterionOptionClass("isMissing", "is_missing", [ + "title", + "details", + "url", + "date", + "studio", + "performers", + "tags", + "scenes", + ]); export const TagIsMissingCriterionOption = new IsMissingCriterionOptionClass( "isMissing", - "tagIsMissing", "is_missing", ["image"] ); export const StudioIsMissingCriterionOption = new IsMissingCriterionOptionClass( "isMissing", - "studioIsMissing", "is_missing", ["image", "stash_id", "details"] ); export const MovieIsMissingCriterionOption = new IsMissingCriterionOptionClass( "isMissing", - "movieIsMissing", "is_missing", ["front_image", "back_image", "scenes"] ); diff --git a/ui/v2.5/src/models/list-filter/criteria/movies.ts b/ui/v2.5/src/models/list-filter/criteria/movies.ts index 69cac4f9ef0..0cd7926eda1 100644 --- a/ui/v2.5/src/models/list-filter/criteria/movies.ts +++ b/ui/v2.5/src/models/list-filter/criteria/movies.ts @@ -1,10 +1,12 @@ import { ILabeledIdCriterion, ILabeledIdCriterionOption } from "./criterion"; +const inputType = "movies"; + export const MoviesCriterionOption = new ILabeledIdCriterionOption( "movies", "movies", - "movies", - false + false, + inputType ); export class MoviesCriterion extends ILabeledIdCriterion { diff --git a/ui/v2.5/src/models/list-filter/criteria/none.ts b/ui/v2.5/src/models/list-filter/criteria/none.ts index 818f4a66538..9aabef0f9de 100644 --- a/ui/v2.5/src/models/list-filter/criteria/none.ts +++ b/ui/v2.5/src/models/list-filter/criteria/none.ts @@ -1,10 +1,6 @@ import { Criterion, StringCriterionOption } from "./criterion"; -export const NoneCriterionOption = new StringCriterionOption( - "none", - "none", - "none" -); +export const NoneCriterionOption = new StringCriterionOption("none", "none"); export class NoneCriterion extends Criterion { constructor() { super(NoneCriterionOption, "none"); diff --git a/ui/v2.5/src/models/list-filter/criteria/organized.ts b/ui/v2.5/src/models/list-filter/criteria/organized.ts 
index 52675020836..a62384100e5 100644 --- a/ui/v2.5/src/models/list-filter/criteria/organized.ts +++ b/ui/v2.5/src/models/list-filter/criteria/organized.ts @@ -1,7 +1,6 @@ import { BooleanCriterion, BooleanCriterionOption } from "./criterion"; export const OrganizedCriterionOption = new BooleanCriterionOption( - "organized", "organized", "organized" ); diff --git a/ui/v2.5/src/models/list-filter/criteria/performers.ts b/ui/v2.5/src/models/list-filter/criteria/performers.ts index b68d5e0b189..612c7b47b49 100644 --- a/ui/v2.5/src/models/list-filter/criteria/performers.ts +++ b/ui/v2.5/src/models/list-filter/criteria/performers.ts @@ -17,12 +17,15 @@ const modifierOptions = [ const defaultModifier = CriterionModifier.IncludesAll; +const inputType = "performers"; + export const PerformersCriterionOption = new CriterionOption({ messageID: "performers", type: "performers", - parameterName: "performers", modifierOptions, defaultModifier, + makeCriterion: () => new PerformersCriterion(), + inputType, }); export class PerformersCriterion extends Criterion { diff --git a/ui/v2.5/src/models/list-filter/criteria/phash.ts b/ui/v2.5/src/models/list-filter/criteria/phash.ts index 5a5ec09dce2..433b06b9bb2 100644 --- a/ui/v2.5/src/models/list-filter/criteria/phash.ts +++ b/ui/v2.5/src/models/list-filter/criteria/phash.ts @@ -12,8 +12,7 @@ import { export const PhashCriterionOption = new CriterionOption({ messageID: "media_info.phash", - type: "phash", - parameterName: "phash_distance", + type: "phash_distance", inputType: "text", modifierOptions: [ CriterionModifier.Equals, @@ -21,6 +20,7 @@ export const PhashCriterionOption = new CriterionOption({ CriterionModifier.IsNull, CriterionModifier.NotNull, ], + makeCriterion: () => new PhashCriterion(), }); export class PhashCriterion extends Criterion { @@ -53,7 +53,7 @@ export class PhashCriterion extends Criterion { export const DuplicatedCriterionOption = new BooleanCriterionOption( "duplicated_phash", "duplicated", - "duplicated" + 
() => new DuplicatedCriterion() ); export class DuplicatedCriterion extends PhashDuplicateCriterion { diff --git a/ui/v2.5/src/models/list-filter/criteria/resolution.ts b/ui/v2.5/src/models/list-filter/criteria/resolution.ts index 5ee0b72541d..6961e22b1b9 100644 --- a/ui/v2.5/src/models/list-filter/criteria/resolution.ts +++ b/ui/v2.5/src/models/list-filter/criteria/resolution.ts @@ -4,7 +4,12 @@ import { } from "src/core/generated-graphql"; import { stringToResolution, resolutionStrings } from "src/utils/resolution"; import { CriterionType } from "../types"; -import { CriterionOption, StringCriterion } from "./criterion"; +import { + Criterion, + CriterionOption, + CriterionValue, + StringCriterion, +} from "./criterion"; abstract class AbstractResolutionCriterion extends StringCriterion { protected toCriterionInput(): ResolutionCriterionInput | undefined { @@ -20,11 +25,13 @@ abstract class AbstractResolutionCriterion extends StringCriterion { } class ResolutionCriterionOptionType extends CriterionOption { - constructor(value: CriterionType) { + constructor( + value: CriterionType, + makeCriterion: () => Criterion + ) { super({ messageID: value, type: value, - parameterName: value, modifierOptions: [ CriterionModifier.Equals, CriterionModifier.NotEquals, @@ -32,12 +39,14 @@ class ResolutionCriterionOptionType extends CriterionOption { CriterionModifier.LessThan, ], options: resolutionStrings, + makeCriterion, }); } } export const ResolutionCriterionOption = new ResolutionCriterionOptionType( - "resolution" + "resolution", + () => new ResolutionCriterion() ); export class ResolutionCriterion extends AbstractResolutionCriterion { constructor() { @@ -46,7 +55,10 @@ export class ResolutionCriterion extends AbstractResolutionCriterion { } export const AverageResolutionCriterionOption = - new ResolutionCriterionOptionType("average_resolution"); + new ResolutionCriterionOptionType( + "average_resolution", + () => new AverageResolutionCriterion() + ); export class 
AverageResolutionCriterion extends AbstractResolutionCriterion { constructor() { diff --git a/ui/v2.5/src/models/list-filter/criteria/stash-ids.ts b/ui/v2.5/src/models/list-filter/criteria/stash-ids.ts index ac6be309d7c..82c63c15adb 100644 --- a/ui/v2.5/src/models/list-filter/criteria/stash-ids.ts +++ b/ui/v2.5/src/models/list-filter/criteria/stash-ids.ts @@ -10,13 +10,13 @@ import { Criterion, CriterionOption } from "./criterion"; export const StashIDCriterionOption = new CriterionOption({ messageID: "stash_id", type: "stash_id_endpoint", - parameterName: "stash_id_endpoint", modifierOptions: [ CriterionModifier.Equals, CriterionModifier.NotEquals, CriterionModifier.IsNull, CriterionModifier.NotNull, ], + makeCriterion: () => new StashIDCriterion(), }); export class StashIDCriterion extends Criterion { diff --git a/ui/v2.5/src/models/list-filter/criteria/studios.ts b/ui/v2.5/src/models/list-filter/criteria/studios.ts index 8f3bfe7d4e4..e238943c1c5 100644 --- a/ui/v2.5/src/models/list-filter/criteria/studios.ts +++ b/ui/v2.5/src/models/list-filter/criteria/studios.ts @@ -13,13 +13,15 @@ const modifierOptions = [ ]; const defaultModifier = CriterionModifier.Includes; +const inputType = "studios"; export const StudiosCriterionOption = new CriterionOption({ messageID: "studios", type: "studios", - parameterName: "studios", modifierOptions, defaultModifier, + makeCriterion: () => new StudiosCriterion(), + inputType, }); export class StudiosCriterion extends IHierarchicalLabeledIdCriterion { @@ -29,10 +31,10 @@ export class StudiosCriterion extends IHierarchicalLabeledIdCriterion { } export const ParentStudiosCriterionOption = new ILabeledIdCriterionOption( - "parent_studios", "parent_studios", "parents", - false + false, + inputType ); export class ParentStudiosCriterion extends ILabeledIdCriterion { constructor() { diff --git a/ui/v2.5/src/models/list-filter/criteria/tags.ts b/ui/v2.5/src/models/list-filter/criteria/tags.ts index d197d11ff9b..fe19fc2860c 100644 --- 
a/ui/v2.5/src/models/list-filter/criteria/tags.ts +++ b/ui/v2.5/src/models/list-filter/criteria/tags.ts @@ -1,7 +1,8 @@ import { CriterionModifier } from "src/core/generated-graphql"; import { CriterionOption, IHierarchicalLabeledIdCriterion } from "./criterion"; +import { CriterionType } from "../types"; -const modifierOptions = [ +const defaultModifierOptions = [ CriterionModifier.IncludesAll, CriterionModifier.Includes, CriterionModifier.Equals, @@ -17,41 +18,53 @@ const withoutEqualsModifierOptions = [ ]; const defaultModifier = CriterionModifier.IncludesAll; +const inputType = "tags"; -export const TagsCriterionOption = new CriterionOption({ - messageID: "tags", - type: "tags", - parameterName: "tags", - modifierOptions, - defaultModifier, -}); -export const SceneTagsCriterionOption = new CriterionOption({ - messageID: "sceneTags", - type: "sceneTags", - parameterName: "scene_tags", - modifierOptions, - defaultModifier, -}); -export const PerformerTagsCriterionOption = new CriterionOption({ - messageID: "performerTags", - type: "performerTags", - parameterName: "performer_tags", - modifierOptions: withoutEqualsModifierOptions, - defaultModifier, -}); -export const ParentTagsCriterionOption = new CriterionOption({ - messageID: "parent_tags", - type: "parentTags", - parameterName: "parents", - modifierOptions: withoutEqualsModifierOptions, - defaultModifier, -}); -export const ChildTagsCriterionOption = new CriterionOption({ - messageID: "sub_tags", - type: "childTags", - parameterName: "children", - modifierOptions: withoutEqualsModifierOptions, - defaultModifier, -}); +export class TagsCriterionOptionClass extends CriterionOption { + constructor( + messageID: string, + type: CriterionType, + modifierOptions: CriterionModifier[] + ) { + super({ + messageID, + type, + modifierOptions, + defaultModifier, + makeCriterion: () => new TagsCriterion(this), + inputType, + }); + } +} + +export const TagsCriterionOption = new TagsCriterionOptionClass( + "tags", + "tags", 
+ defaultModifierOptions +); + +export const SceneTagsCriterionOption = new TagsCriterionOptionClass( + "scene_tags", + "scene_tags", + defaultModifierOptions +); + +export const PerformerTagsCriterionOption = new TagsCriterionOptionClass( + "performer_tags", + "performer_tags", + withoutEqualsModifierOptions +); + +export const ParentTagsCriterionOption = new TagsCriterionOptionClass( + "parent_tags", + "parents", + withoutEqualsModifierOptions +); + +export const ChildTagsCriterionOption = new TagsCriterionOptionClass( + "sub_tags", + "children", + withoutEqualsModifierOptions +); export class TagsCriterion extends IHierarchicalLabeledIdCriterion {} diff --git a/ui/v2.5/src/models/list-filter/filter.ts b/ui/v2.5/src/models/list-filter/filter.ts index 10f420d2334..9e6bb863001 100644 --- a/ui/v2.5/src/models/list-filter/filter.ts +++ b/ui/v2.5/src/models/list-filter/filter.ts @@ -2,11 +2,12 @@ import { ConfigDataFragment, FilterMode, FindFilterType, + SavedFilterDataFragment, SortDirectionEnum, } from "src/core/generated-graphql"; import { Criterion, CriterionValue } from "./criteria/criterion"; import { makeCriteria } from "./criteria/factory"; -import { DisplayMode } from "./types"; +import { CriterionType, DisplayMode } from "./types"; interface IDecodedParams { perPage?: number; @@ -127,7 +128,7 @@ export class ListFilterModel { for (const jsonString of params.c) { try { const encodedCriterion = JSON.parse(jsonString); - const criterion = makeCriteria(this.config, encodedCriterion.type); + const criterion = makeCriteria(this.mode, encodedCriterion.type); // it's possible that we have unsupported criteria. Just skip if so. 
if (criterion) { criterion.setFromEncodedCriterion(encodedCriterion); @@ -248,8 +249,41 @@ export class ListFilterModel { this.configureFromDecodedParams(decoded); } - public configureFromJSON(json: string) { - this.configureFromDecodedParams(JSON.parse(json)); + public configureFromSavedFilter(savedFilter: SavedFilterDataFragment) { + const { + find_filter: findFilter, + object_filter: objectFilter, + ui_options: uiOptions, + } = savedFilter; + + this.itemsPerPage = findFilter?.per_page ?? this.itemsPerPage; + this.sortBy = findFilter?.sort ?? this.sortBy; + // parse the random seed if provided + const match = this.sortBy?.match(/^random_(\d+)$/); + if (match) { + this.sortBy = "random"; + this.randomSeed = Number.parseInt(match[1], 10); + } + this.sortDirection = + (findFilter?.direction as SortDirectionEnum) ?? this.sortDirection; + this.searchTerm = findFilter?.q ?? this.searchTerm; + + this.displayMode = uiOptions?.display_mode ?? this.displayMode; + this.zoomIndex = uiOptions?.zoom_index ?? this.zoomIndex; + + this.currentPage = 1; + + this.criteria = []; + if (objectFilter) { + Object.keys(objectFilter).forEach((key) => { + const criterion = makeCriteria(this.mode, key as CriterionType); + // it's possible that we have unsupported criteria. Just skip if so. 
+ if (criterion) { + criterion.setFromEncodedCriterion(objectFilter[key]); + this.criteria.push(criterion); + } + }); + } } private setRandomSeed() { @@ -405,4 +439,22 @@ export class ListFilterModel { return output; } + + public makeSavedFindFilter() { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const output: Record = {}; + this.criteria.forEach((criterion) => { + criterion.toSavedFilter(output); + }); + + return output; + } + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + public makeUIOptions(): Record { + return { + display_mode: this.displayMode, + zoom_index: this.zoomIndex, + }; + } } diff --git a/ui/v2.5/src/models/list-filter/galleries.ts b/ui/v2.5/src/models/list-filter/galleries.ts index 36bb65de6f5..2a597417f2a 100644 --- a/ui/v2.5/src/models/list-filter/galleries.ts +++ b/ui/v2.5/src/models/list-filter/galleries.ts @@ -45,11 +45,7 @@ const criterionOptions = [ createStringCriterionOption("title"), createStringCriterionOption("details"), createPathCriterionOption("path"), - createStringCriterionOption( - "galleryChecksum", - "media_info.checksum", - "checksum" - ), + createStringCriterionOption("checksum", "media_info.checksum"), new NullNumberCriterionOption("rating", "rating100"), OrganizedCriterionOption, AverageResolutionCriterionOption, diff --git a/ui/v2.5/src/models/list-filter/performers.ts b/ui/v2.5/src/models/list-filter/performers.ts index 2995aebb747..7c66880ee75 100644 --- a/ui/v2.5/src/models/list-filter/performers.ts +++ b/ui/v2.5/src/models/list-filter/performers.ts @@ -94,7 +94,7 @@ const criterionOptions = [ createMandatoryNumberCriterionOption("gallery_count"), createMandatoryNumberCriterionOption("o_counter"), createBooleanCriterionOption("ignore_auto_tag"), - new NumberCriterionOption("height", "height_cm", "height_cm"), + new NumberCriterionOption("height", "height_cm"), ...numberCriteria.map((c) => createNumberCriterionOption(c)), ...stringCriteria.map((c) => 
createStringCriterionOption(c)), createDateCriterionOption("birthdate"), diff --git a/ui/v2.5/src/models/list-filter/scenes.ts b/ui/v2.5/src/models/list-filter/scenes.ts index 106875a1251..10895829849 100644 --- a/ui/v2.5/src/models/list-filter/scenes.ts +++ b/ui/v2.5/src/models/list-filter/scenes.ts @@ -59,16 +59,12 @@ const displayModeOptions = [ const criterionOptions = [ createStringCriterionOption("title"), - createStringCriterionOption("scene_code"), + createStringCriterionOption("code", "scene_code"), createPathCriterionOption("path"), createStringCriterionOption("details"), createStringCriterionOption("director"), createMandatoryStringCriterionOption("oshash", "media_info.hash"), - createStringCriterionOption( - "sceneChecksum", - "media_info.checksum", - "checksum" - ), + createStringCriterionOption("checksum", "media_info.checksum"), PhashCriterionOption, DuplicatedCriterionOption, OrganizedCriterionOption, diff --git a/ui/v2.5/src/models/list-filter/tags.ts b/ui/v2.5/src/models/list-filter/tags.ts index 8e90a27e764..12daff1c2bb 100644 --- a/ui/v2.5/src/models/list-filter/tags.ts +++ b/ui/v2.5/src/models/list-filter/tags.ts @@ -53,17 +53,9 @@ const criterionOptions = [ createMandatoryNumberCriterionOption("performer_count"), createMandatoryNumberCriterionOption("marker_count"), ParentTagsCriterionOption, - new MandatoryNumberCriterionOption( - "parent_tag_count", - "parent_tag_count", - "parent_count" - ), + new MandatoryNumberCriterionOption("parent_tag_count", "parent_count"), ChildTagsCriterionOption, - new MandatoryNumberCriterionOption( - "sub_tag_count", - "child_tag_count", - "child_count" - ), + new MandatoryNumberCriterionOption("sub_tag_count", "child_count"), createMandatoryTimestampCriterionOption("created_at"), createMandatoryTimestampCriterionOption("updated_at"), ]; diff --git a/ui/v2.5/src/models/list-filter/types.ts b/ui/v2.5/src/models/list-filter/types.ts index a200024f743..453081ce5a3 100644 --- 
a/ui/v2.5/src/models/list-filter/types.ts +++ b/ui/v2.5/src/models/list-filter/types.ts @@ -112,20 +112,12 @@ export type CriterionType = | "video_codec" | "audio_codec" | "duration" - | "favorite" - | "hasMarkers" - | "sceneIsMissing" - | "imageIsMissing" - | "performerIsMissing" - | "galleryIsMissing" - | "tagIsMissing" - | "studioIsMissing" - | "movieIsMissing" + | "filter_favorites" + | "has_markers" + | "is_missing" | "tags" - | "sceneTags" - | "performerTags" - | "parentTags" - | "childTags" + | "scene_tags" + | "performer_tags" | "tag_count" | "performers" | "studios" @@ -149,7 +141,8 @@ export type CriterionType = | "piercings" | "aliases" | "gender" - | "parent_studios" + | "parents" + | "children" | "scene_count" | "marker_count" | "image_count" @@ -169,13 +162,11 @@ export type CriterionType = | "title" | "oshash" | "checksum" - | "sceneChecksum" - | "galleryChecksum" - | "phash" + | "phash_distance" | "director" | "synopsis" - | "parent_tag_count" - | "child_tag_count" + | "parent_count" + | "child_count" | "performer_favorite" | "performer_age" | "duplicated" @@ -191,6 +182,6 @@ export type CriterionType = | "scene_created_at" | "scene_updated_at" | "description" - | "scene_code" + | "code" | "disambiguation" - | "hasChapters"; + | "has_chapters"; diff --git a/ui/v2.5/src/models/sceneQueue.ts b/ui/v2.5/src/models/sceneQueue.ts index de9cf2bbee2..14f81df5925 100644 --- a/ui/v2.5/src/models/sceneQueue.ts +++ b/ui/v2.5/src/models/sceneQueue.ts @@ -9,6 +9,7 @@ export interface IPlaySceneOptions { newPage?: number; autoPlay?: boolean; continue?: boolean; + start?: number; } export class SceneQueue { @@ -117,6 +118,9 @@ export class SceneQueue { if (options.continue !== undefined) { params.push("continue=" + options.continue); } + if (options.start !== undefined) { + params.push("t=" + options.start); + } return `/scenes/${sceneID}${params.length ? "?" 
+ params.join("&") : ""}`; } } diff --git a/ui/v2.5/src/utils/data.ts b/ui/v2.5/src/utils/data.ts index 6014d105df3..bca1cc0f8a6 100644 --- a/ui/v2.5/src/utils/data.ts +++ b/ui/v2.5/src/utils/data.ts @@ -42,3 +42,27 @@ export function excludeFields( } }); } + +export interface IHasStoredID { + stored_id?: string | null; +} + +export function sortStoredIdObjects( + scrapedObjects?: IHasStoredID[] +): IHasStoredID[] | undefined { + if (!scrapedObjects) { + return undefined; + } + const ret = scrapedObjects.filter((p) => !!p.stored_id); + + if (ret.length === 0) { + return undefined; + } + + // sort by id numerically + ret.sort((a, b) => { + return parseInt(a.stored_id!, 10) - parseInt(b.stored_id!, 10); + }); + + return ret; +} diff --git a/ui/v2.5/yarn.lock b/ui/v2.5/yarn.lock index ff8eac591e8..bcd76ada85d 100644 --- a/ui/v2.5/yarn.lock +++ b/ui/v2.5/yarn.lock @@ -8104,6 +8104,11 @@ vfile@^4.0.0: videojs-font "3.2.0" videojs-vtt.js "^0.15.4" +videojs-abloop@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/videojs-abloop/-/videojs-abloop-1.2.0.tgz#ead4054400e6107d6512553ddff2a97260decf3e" + integrity sha512-6/hvtB5gNQUr5FJ969UhXVg5H+3wxhOzh9AVftlezOXlhzzaWfNfiOJYqNKo01Gc/eSQOvfttrOX7jH+aHpwrw== + videojs-contrib-dash@^5.1.1: version "5.1.1" resolved "https://registry.yarnpkg.com/videojs-contrib-dash/-/videojs-contrib-dash-5.1.1.tgz#9f50191677815a7d816c500977811a926aee0643"