From 5f690d96bdeb341b50ba2e91d57898b2aef9aa0b Mon Sep 17 00:00:00 2001 From: WithoutPants <53250216+WithoutPants@users.noreply.github.com> Date: Wed, 13 Nov 2024 10:14:55 +1100 Subject: [PATCH 01/12] Fix stash scraper errors and add apikey field (#5474) * Use hasura/go-graphql-client instead of shurcooL version * Fix graphql query errors * Support setting api key for stash server --- go.mod | 3 +- go.sum | 6 +- pkg/plugin/examples/common/graphql.go | 2 +- pkg/plugin/util/client.go | 2 +- pkg/scraper/config.go | 3 +- pkg/scraper/graphql.go | 55 ++++++++ pkg/scraper/image.go | 33 ++++- pkg/scraper/stash.go | 122 +++++++++++++++--- .../src/docs/en/Manual/ScraperDevelopment.md | 3 +- 9 files changed, 195 insertions(+), 34 deletions(-) create mode 100644 pkg/scraper/graphql.go diff --git a/go.mod b/go.mod index d1bd409390a..f35ee773e7d 100644 --- a/go.mod +++ b/go.mod @@ -26,6 +26,7 @@ require ( github.com/gorilla/sessions v1.2.1 github.com/gorilla/websocket v1.5.0 github.com/hashicorp/golang-lru/v2 v2.0.7 + github.com/hasura/go-graphql-client v0.13.1 github.com/jinzhu/copier v0.4.0 github.com/jmoiron/sqlx v1.4.0 github.com/json-iterator/go v1.1.12 @@ -39,7 +40,6 @@ require ( github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 github.com/remeh/sizedwaitgroup v1.0.0 github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd - github.com/shurcooL/graphql v0.0.0-20181231061246-d48a9a75455f github.com/sirupsen/logrus v1.9.3 github.com/spf13/cast v1.6.0 github.com/spf13/pflag v1.0.5 @@ -67,6 +67,7 @@ require ( github.com/asticode/go-astikit v0.20.0 // indirect github.com/asticode/go-astits v1.8.0 // indirect github.com/chromedp/sysutil v1.0.0 // indirect + github.com/coder/websocket v1.8.12 // indirect github.com/cpuguy83/go-md2man/v2 v2.0.5 // indirect github.com/davecgh/go-spew v1.1.1 // indirect github.com/dlclark/regexp2 v1.7.0 // indirect diff --git a/go.sum b/go.sum index 15516f7ff98..e069fb1a196 100644 --- a/go.sum +++ b/go.sum @@ -153,6 +153,8 @@ 
github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWH github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20211130200136-a8f946100490/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/coder/websocket v1.8.12 h1:5bUXkEPPIbewrnkU8LTCLVaxi4N4J8ahufH2vlo4NAo= +github.com/coder/websocket v1.8.12/go.mod h1:LNVeNrXQZfe5qhS9ALED3uA+l5pPqvwXg3CKoDBB2gs= github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= @@ -394,6 +396,8 @@ github.com/hashicorp/vault/api v1.0.4/go.mod h1:gDcqh3WGcR1cpF5AJz/B1UFheUEneMoI github.com/hashicorp/vault/sdk v0.1.13/go.mod h1:B+hVj7TpuQY1Y/GPbCpffmgd+tSEwvhkWnjtSYCaS2M= github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb/go.mod h1:+NfK9FKeTrX5uv1uIXGdwYDTeHna2qgaIlx54MXqjAM= github.com/hashicorp/yamux v0.0.0-20181012175058-2f1d1f20f75d/go.mod h1:+NfK9FKeTrX5uv1uIXGdwYDTeHna2qgaIlx54MXqjAM= +github.com/hasura/go-graphql-client v0.13.1 h1:kKbjhxhpwz58usVl+Xvgah/TDha5K2akNTRQdsEHN6U= +github.com/hasura/go-graphql-client v0.13.1/go.mod h1:k7FF7h53C+hSNFRG3++DdVZWIuHdCaTbI7siTJ//zGQ= github.com/hjson/hjson-go/v4 v4.0.0 h1:wlm6IYYqHjOdXH1gHev4VoXCaW20HdQAGCxdOEEg2cs= github.com/hjson/hjson-go/v4 v4.0.0/go.mod h1:KaYt3bTw3zhBjYqnXkYywcYctk0A2nxeEFTse3rH13E= github.com/huandu/xstrings v1.0.0/go.mod h1:4qWG/gcEcfX4z/mBDHJ++3ReCw9ibxbsNJbcucJdbSo= @@ -591,8 +595,6 @@ github.com/sagikazarmark/crypt v0.4.0/go.mod h1:ALv2SRj7GxYV4HO9elxH9nS6M9gW+xDN github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= 
github.com/sergi/go-diff v1.3.1 h1:xkr+Oxo4BOQKmkn/B9eMK0g5Kg/983T9DqqPHwYqD+8= github.com/sergi/go-diff v1.3.1/go.mod h1:aMJSSKb2lpPvRNec0+w3fl7LP9IOFzdc9Pa4NFbPK1I= -github.com/shurcooL/graphql v0.0.0-20181231061246-d48a9a75455f h1:tygelZueB1EtXkPI6mQ4o9DQ0+FKW41hTbunoXZCTqk= -github.com/shurcooL/graphql v0.0.0-20181231061246-d48a9a75455f/go.mod h1:AuYgA5Kyo4c7HfUmvRGs/6rGlMMV/6B1bVnB9JxJEEg= github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= diff --git a/pkg/plugin/examples/common/graphql.go b/pkg/plugin/examples/common/graphql.go index 8650758a8f0..40ac8d77e6e 100644 --- a/pkg/plugin/examples/common/graphql.go +++ b/pkg/plugin/examples/common/graphql.go @@ -8,7 +8,7 @@ import ( "errors" "fmt" - "github.com/shurcooL/graphql" + graphql "github.com/hasura/go-graphql-client" "github.com/stashapp/stash/pkg/plugin/common/log" ) diff --git a/pkg/plugin/util/client.go b/pkg/plugin/util/client.go index 7b33d8678e9..37c37bfac91 100644 --- a/pkg/plugin/util/client.go +++ b/pkg/plugin/util/client.go @@ -8,7 +8,7 @@ import ( "net/url" "strconv" - "github.com/shurcooL/graphql" + graphql "github.com/hasura/go-graphql-client" "github.com/stashapp/stash/pkg/plugin/common" ) diff --git a/pkg/scraper/config.go b/pkg/scraper/config.go index 9c51b4bba95..e19625f4528 100644 --- a/pkg/scraper/config.go +++ b/pkg/scraper/config.go @@ -114,7 +114,8 @@ func (c config) validate() error { } type stashServer struct { - URL string `yaml:"url"` + URL string `yaml:"url"` + ApiKey string `yaml:"apiKey"` } type scraperTypeConfig struct { diff --git a/pkg/scraper/graphql.go b/pkg/scraper/graphql.go new file mode 100644 index 00000000000..8f582fe8448 --- /dev/null +++ b/pkg/scraper/graphql.go @@ -0,0 +1,55 @@ +package scraper + +import ( + "errors" + "strings" + + 
"github.com/hasura/go-graphql-client" +) + +type graphqlErrors []error + +func (e graphqlErrors) Error() string { + b := strings.Builder{} + for _, err := range e { + _, _ = b.WriteString(err.Error()) + } + return b.String() +} + +type graphqlError struct { + err graphql.Error +} + +func (e graphqlError) Error() string { + unwrapped := e.err.Unwrap() + if unwrapped != nil { + var networkErr graphql.NetworkError + if errors.As(unwrapped, &networkErr) { + if networkErr.StatusCode() == 422 { + return networkErr.Body() + } + } + } + return e.err.Error() +} + +// convertGraphqlError converts a graphql.Error or graphql.Errors into an error with a useful message. +// graphql.Error swallows important information, so we need to convert it to a more useful error type. +func convertGraphqlError(err error) error { + var gqlErrs graphql.Errors + if errors.As(err, &gqlErrs) { + ret := make(graphqlErrors, len(gqlErrs)) + for i, e := range gqlErrs { + ret[i] = convertGraphqlError(e) + } + return ret + } + + var gqlErr graphql.Error + if errors.As(err, &gqlErr) { + return graphqlError{gqlErr} + } + + return err +} diff --git a/pkg/scraper/image.go b/pkg/scraper/image.go index 193ddc517b6..ee82d2f2105 100644 --- a/pkg/scraper/image.go +++ b/pkg/scraper/image.go @@ -122,13 +122,19 @@ func setGroupBackImage(ctx context.Context, client *http.Client, m *models.Scrap return nil } -func getImage(ctx context.Context, url string, client *http.Client, globalConfig GlobalConfig) (*string, error) { +type imageGetter struct { + client *http.Client + globalConfig GlobalConfig + requestModifier func(req *http.Request) +} + +func (i *imageGetter) getImage(ctx context.Context, url string) (*string, error) { req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) if err != nil { return nil, err } - userAgent := globalConfig.GetScraperUserAgent() + userAgent := i.globalConfig.GetScraperUserAgent() if userAgent != "" { req.Header.Set("User-Agent", userAgent) } @@ -140,7 +146,11 @@ func 
getImage(ctx context.Context, url string, client *http.Client, globalConfig req.Header.Set("Referer", req.URL.Scheme+"://"+req.Host+"/") } - resp, err := client.Do(req) + if i.requestModifier != nil { + i.requestModifier(req) + } + + resp, err := i.client.Do(req) if err != nil { return nil, err @@ -167,10 +177,19 @@ func getImage(ctx context.Context, url string, client *http.Client, globalConfig return &img, nil } -func getStashPerformerImage(ctx context.Context, stashURL string, performerID string, client *http.Client, globalConfig GlobalConfig) (*string, error) { - return getImage(ctx, stashURL+"/performer/"+performerID+"/image", client, globalConfig) +func getImage(ctx context.Context, url string, client *http.Client, globalConfig GlobalConfig) (*string, error) { + g := imageGetter{ + client: client, + globalConfig: globalConfig, + } + + return g.getImage(ctx, url) +} + +func getStashPerformerImage(ctx context.Context, stashURL string, performerID string, imageGetter imageGetter) (*string, error) { + return imageGetter.getImage(ctx, stashURL+"/performer/"+performerID+"/image") } -func getStashSceneImage(ctx context.Context, stashURL string, sceneID string, client *http.Client, globalConfig GlobalConfig) (*string, error) { - return getImage(ctx, stashURL+"/scene/"+sceneID+"/screenshot", client, globalConfig) +func getStashSceneImage(ctx context.Context, stashURL string, sceneID string, imageGetter imageGetter) (*string, error) { + return imageGetter.getImage(ctx, stashURL+"/scene/"+sceneID+"/screenshot") } diff --git a/pkg/scraper/stash.go b/pkg/scraper/stash.go index a50db8b5e61..3e28a3e9916 100644 --- a/pkg/scraper/stash.go +++ b/pkg/scraper/stash.go @@ -4,9 +4,11 @@ import ( "context" "fmt" "net/http" + "strconv" + "strings" + graphql "github.com/hasura/go-graphql-client" "github.com/jinzhu/copier" - "github.com/shurcooL/graphql" "github.com/stashapp/stash/pkg/models" ) @@ -27,9 +29,21 @@ func newStashScraper(scraper scraperTypeConfig, client *http.Client, 
config conf } } +func setApiKeyHeader(apiKey string) func(req *http.Request) { + return func(req *http.Request) { + req.Header.Set("ApiKey", apiKey) + } +} + func (s *stashScraper) getStashClient() *graphql.Client { - url := s.config.StashServer.URL - return graphql.NewClient(url+"/graphql", nil) + url := s.config.StashServer.URL + "/graphql" + ret := graphql.NewClient(url, s.client) + + if s.config.StashServer.ApiKey != "" { + ret = ret.WithRequestModifier(setApiKeyHeader(s.config.StashServer.ApiKey)) + } + + return ret } type stashFindPerformerNamePerformer struct { @@ -58,14 +72,12 @@ type scrapedTagStash struct { type scrapedPerformerStash struct { Name *string `graphql:"name" json:"name"` Gender *string `graphql:"gender" json:"gender"` - URL *string `graphql:"url" json:"url"` - Twitter *string `graphql:"twitter" json:"twitter"` - Instagram *string `graphql:"instagram" json:"instagram"` + URLs []string `graphql:"urls" json:"urls"` Birthdate *string `graphql:"birthdate" json:"birthdate"` Ethnicity *string `graphql:"ethnicity" json:"ethnicity"` Country *string `graphql:"country" json:"country"` EyeColor *string `graphql:"eye_color" json:"eye_color"` - Height *string `graphql:"height" json:"height"` + Height *int `graphql:"height_cm" json:"height_cm"` Measurements *string `graphql:"measurements" json:"measurements"` FakeTits *string `graphql:"fake_tits" json:"fake_tits"` PenisLength *string `graphql:"penis_length" json:"penis_length"` @@ -73,12 +85,25 @@ type scrapedPerformerStash struct { CareerLength *string `graphql:"career_length" json:"career_length"` Tattoos *string `graphql:"tattoos" json:"tattoos"` Piercings *string `graphql:"piercings" json:"piercings"` - Aliases *string `graphql:"aliases" json:"aliases"` + Aliases []string `graphql:"alias_list" json:"alias_list"` Tags []*scrapedTagStash `graphql:"tags" json:"tags"` Details *string `graphql:"details" json:"details"` DeathDate *string `graphql:"death_date" json:"death_date"` HairColor *string 
`graphql:"hair_color" json:"hair_color"` - Weight *string `graphql:"weight" json:"weight"` + Weight *int `graphql:"weight" json:"weight"` +} + +func (s *stashScraper) imageGetter() imageGetter { + ret := imageGetter{ + client: s.client, + globalConfig: s.globalConfig, + } + + if s.config.StashServer.ApiKey != "" { + ret.requestModifier = setApiKeyHeader(s.config.StashServer.ApiKey) + } + + return ret } func (s *stashScraper) scrapeByFragment(ctx context.Context, input Input) (ScrapedContent, error) { @@ -102,12 +127,12 @@ func (s *stashScraper) scrapeByFragment(ctx context.Context, input Input) (Scrap // get the id from the URL field vars := map[string]interface{}{ - "f": performerID, + "f": graphql.ID(performerID), } err := client.Query(ctx, &q, vars) if err != nil { - return nil, err + return nil, convertGraphqlError(err) } // need to copy back to a scraped performer @@ -117,11 +142,28 @@ func (s *stashScraper) scrapeByFragment(ctx context.Context, input Input) (Scrap return nil, err } + // convert alias list to aliases + aliasStr := strings.Join(q.FindPerformer.Aliases, ", ") + ret.Aliases = &aliasStr + + // convert numeric to string + if q.FindPerformer.Height != nil { + heightStr := strconv.Itoa(*q.FindPerformer.Height) + ret.Height = &heightStr + } + if q.FindPerformer.Weight != nil { + weightStr := strconv.Itoa(*q.FindPerformer.Weight) + ret.Weight = &weightStr + } + // get the performer image directly - ret.Image, err = getStashPerformerImage(ctx, s.config.StashServer.URL, performerID, s.client, s.globalConfig) + ig := s.imageGetter() + img, err := getStashPerformerImage(ctx, s.config.StashServer.URL, performerID, ig) if err != nil { return nil, err } + ret.Images = []string{*img} + ret.Image = img return &ret, nil } @@ -143,8 +185,15 @@ func (s *stashScraper) scrapedStashSceneToScrapedScene(ctx context.Context, scen return nil, err } - // get the performer image directly - ret.Image, err = getStashSceneImage(ctx, s.config.StashServer.URL, scene.ID, 
s.client, s.globalConfig) + // convert first in files to file + if len(scene.Files) > 0 { + f := scene.Files[0].SceneFileType() + ret.File = &f + } + + // get the scene image directly + ig := s.imageGetter() + ret.Image, err = getStashSceneImage(ctx, s.config.StashServer.URL, scene.ID, ig) if err != nil { return nil, err } @@ -175,7 +224,7 @@ func (s *stashScraper) scrapeByName(ctx context.Context, name string, ty ScrapeC err := client.Query(ctx, &q, vars) if err != nil { - return nil, err + return nil, convertGraphqlError(err) } for _, scene := range q.FindScenes.Scenes { @@ -207,13 +256,41 @@ func (s *stashScraper) scrapeByName(ctx context.Context, name string, ty ScrapeC return nil, ErrNotSupported } +type stashVideoFile struct { + Size int64 `graphql:"size" json:"size"` + Duration float64 `graphql:"duration" json:"duration"` + VideoCodec string `graphql:"video_codec" json:"video_codec"` + AudioCodec string `graphql:"audio_codec" json:"audio_codec"` + Width int `graphql:"width" json:"width"` + Height int `graphql:"height" json:"height"` + Framerate float64 `graphql:"frame_rate" json:"frame_rate"` + Bitrate int `graphql:"bit_rate" json:"bit_rate"` +} + +func (f stashVideoFile) SceneFileType() models.SceneFileType { + ret := models.SceneFileType{ + Duration: &f.Duration, + VideoCodec: &f.VideoCodec, + AudioCodec: &f.AudioCodec, + Width: &f.Width, + Height: &f.Height, + Framerate: &f.Framerate, + Bitrate: &f.Bitrate, + } + + size := strconv.FormatInt(f.Size, 10) + ret.Size = &size + + return ret +} + type scrapedSceneStash struct { ID string `graphql:"id" json:"id"` Title *string `graphql:"title" json:"title"` Details *string `graphql:"details" json:"details"` - URL *string `graphql:"url" json:"url"` + URLs []string `graphql:"urls" json:"urls"` Date *string `graphql:"date" json:"date"` - File *models.SceneFileType `graphql:"file" json:"file"` + Files []stashVideoFile `graphql:"files" json:"files"` Studio *scrapedStudioStash `graphql:"studio" json:"studio"` Tags 
[]*scrapedTagStash `graphql:"tags" json:"tags"` Performers []*scrapedPerformerStash `graphql:"performers" json:"performers"` @@ -239,12 +316,16 @@ func (s *stashScraper) scrapeSceneByScene(ctx context.Context, scene *models.Sce } vars := map[string]interface{}{ - "c": &input, + "c": input, } client := s.getStashClient() if err := client.Query(ctx, &q, vars); err != nil { - return nil, err + return nil, convertGraphqlError(err) + } + + if q.FindScene == nil { + return nil, nil } // need to copy back to a scraped scene @@ -254,7 +335,8 @@ func (s *stashScraper) scrapeSceneByScene(ctx context.Context, scene *models.Sce } // get the performer image directly - ret.Image, err = getStashSceneImage(ctx, s.config.StashServer.URL, q.FindScene.ID, s.client, s.globalConfig) + ig := s.imageGetter() + ret.Image, err = getStashSceneImage(ctx, s.config.StashServer.URL, q.FindScene.ID, ig) if err != nil { return nil, err } diff --git a/ui/v2.5/src/docs/en/Manual/ScraperDevelopment.md b/ui/v2.5/src/docs/en/Manual/ScraperDevelopment.md index caa3d41dc80..a0ac30547c2 100644 --- a/ui/v2.5/src/docs/en/Manual/ScraperDevelopment.md +++ b/ui/v2.5/src/docs/en/Manual/ScraperDevelopment.md @@ -247,7 +247,7 @@ sceneByURL: A different stash server can be configured as a scraping source. This action applies only to `performerByName`, `performerByFragment`, and `sceneByFragment` types. This action requires that the top-level `stashServer` field is configured. -`stashServer` contains a single `url` field for the remote stash server. The username and password can be embedded in this string using `username:password@host`. +`stashServer` contains a single `url` field for the remote stash server. The username and password can be embedded in this string using `username:password@host`. Alternatively, the `apiKey` field can be used to authenticate with the remote stash server. 
An example stash scrape configuration is below: @@ -260,6 +260,7 @@ performerByFragment: sceneByFragment: action: stash stashServer: + apiKey: url: http://stashserver.com:9999 ``` From 6c5bf5f052488604afde05e554591bfee40b4cc1 Mon Sep 17 00:00:00 2001 From: WithoutPants <53250216+WithoutPants@users.noreply.github.com> Date: Fri, 22 Nov 2024 08:27:23 +1100 Subject: [PATCH 02/12] Convert json numbers to numbers (#5496) --- internal/api/resolver_mutation_configure.go | 16 ++++++ pkg/utils/json.go | 16 ++++++ pkg/utils/map.go | 17 +++++++ pkg/utils/map_test.go | 55 +++++++++++++++++++++ 4 files changed, 104 insertions(+) create mode 100644 pkg/utils/json.go diff --git a/internal/api/resolver_mutation_configure.go b/internal/api/resolver_mutation_configure.go index 34b627b3c66..c4356ff5857 100644 --- a/internal/api/resolver_mutation_configure.go +++ b/internal/api/resolver_mutation_configure.go @@ -2,6 +2,7 @@ package api import ( "context" + "encoding/json" "errors" "fmt" "path/filepath" @@ -643,10 +644,14 @@ func (r *mutationResolver) ConfigureUI(ctx context.Context, input map[string]int c := config.GetInstance() if input != nil { + // #5483 - convert JSON numbers to float64 or int64 + input = utils.ConvertMapJSONNumbers(input) c.SetUIConfiguration(input) } if partial != nil { + // #5483 - convert JSON numbers to float64 or int64 + partial = utils.ConvertMapJSONNumbers(partial) // merge partial into existing config existing := c.GetUIConfiguration() utils.MergeMaps(existing, partial) @@ -664,6 +669,14 @@ func (r *mutationResolver) ConfigureUISetting(ctx context.Context, key string, v c := config.GetInstance() cfg := utils.NestedMap(c.GetUIConfiguration()) + + // #5483 - convert JSON numbers to float64 or int64 + if m, ok := value.(map[string]interface{}); ok { + value = utils.ConvertMapJSONNumbers(m) + } else if n, ok := value.(json.Number); ok { + value = utils.JSONNumberToNumber(n) + } + cfg.Set(key, value) return r.ConfigureUI(ctx, cfg, nil) @@ -671,6 +684,9 @@ func 
(r *mutationResolver) ConfigureUISetting(ctx context.Context, key string, v func (r *mutationResolver) ConfigurePlugin(ctx context.Context, pluginID string, input map[string]interface{}) (map[string]interface{}, error) { c := config.GetInstance() + + // #5483 - convert JSON numbers to float64 or int64 + input = utils.ConvertMapJSONNumbers(input) c.SetPluginConfiguration(pluginID, input) if err := c.Write(); err != nil { diff --git a/pkg/utils/json.go b/pkg/utils/json.go new file mode 100644 index 00000000000..ae69180688c --- /dev/null +++ b/pkg/utils/json.go @@ -0,0 +1,16 @@ +package utils + +import ( + "encoding/json" + "strings" +) + +// JSONNumberToNumber converts a JSON number to either a float64 or int64. +func JSONNumberToNumber(n json.Number) interface{} { + if strings.Contains(string(n), ".") { + f, _ := n.Float64() + return f + } + ret, _ := n.Int64() + return ret +} diff --git a/pkg/utils/map.go b/pkg/utils/map.go index 0c555857443..dbef17646b2 100644 --- a/pkg/utils/map.go +++ b/pkg/utils/map.go @@ -1,6 +1,7 @@ package utils import ( + "encoding/json" "strings" ) @@ -79,3 +80,19 @@ func MergeMaps(dest map[string]interface{}, src map[string]interface{}) { dest[k] = v } } + +// ConvertMapJSONNumbers converts all JSON numbers in a map to either float64 or int64. 
+func ConvertMapJSONNumbers(m map[string]interface{}) (ret map[string]interface{}) { + ret = make(map[string]interface{}) + for k, v := range m { + if n, ok := v.(json.Number); ok { + ret[k] = JSONNumberToNumber(n) + } else if mm, ok := v.(map[string]interface{}); ok { + ret[k] = ConvertMapJSONNumbers(mm) + } else { + ret[k] = v + } + } + + return ret +} diff --git a/pkg/utils/map_test.go b/pkg/utils/map_test.go index 54dfacedd30..142cd639321 100644 --- a/pkg/utils/map_test.go +++ b/pkg/utils/map_test.go @@ -1,8 +1,11 @@ package utils import ( + "encoding/json" "reflect" "testing" + + "github.com/stretchr/testify/assert" ) func TestNestedMapGet(t *testing.T) { @@ -279,3 +282,55 @@ func TestMergeMaps(t *testing.T) { }) } } + +func TestConvertMapJSONNumbers(t *testing.T) { + tests := []struct { + name string + input map[string]interface{} + expected map[string]interface{} + }{ + { + name: "Convert JSON numbers to numbers", + input: map[string]interface{}{ + "int": json.Number("12"), + "float": json.Number("12.34"), + "string": "foo", + }, + expected: map[string]interface{}{ + "int": int64(12), + "float": 12.34, + "string": "foo", + }, + }, + { + name: "Convert JSON numbers to numbers in nested maps", + input: map[string]interface{}{ + "foo": map[string]interface{}{ + "int": json.Number("56"), + "float": json.Number("56.78"), + "nested-string": "bar", + }, + "int": json.Number("12"), + "float": json.Number("12.34"), + "string": "foo", + }, + expected: map[string]interface{}{ + "foo": map[string]interface{}{ + "int": int64(56), + "float": 56.78, + "nested-string": "bar", + }, + "int": int64(12), + "float": 12.34, + "string": "foo", + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := ConvertMapJSONNumbers(tt.input) + assert.Equal(t, tt.expected, result) + }) + } +} From f81202660c8a9b16ce2e83ed2644dfcfb7cde830 Mon Sep 17 00:00:00 2001 From: WithoutPants <53250216+WithoutPants@users.noreply.github.com> Date: Fri, 22 Nov 2024 
08:27:41 +1100 Subject: [PATCH 03/12] Validate tagger blacklist entries (#5497) * Don't let invalid tagger regex crash UI * Validate blacklist entries and show errors --- .../src/components/Tagger/scenes/Config.tsx | 160 +++++++++++------- ui/v2.5/src/components/Tagger/utils.ts | 20 ++- ui/v2.5/src/locales/en-GB.json | 3 + 3 files changed, 115 insertions(+), 68 deletions(-) diff --git a/ui/v2.5/src/components/Tagger/scenes/Config.tsx b/ui/v2.5/src/components/Tagger/scenes/Config.tsx index 1ed137fb04a..f15fbd250f5 100644 --- a/ui/v2.5/src/components/Tagger/scenes/Config.tsx +++ b/ui/v2.5/src/components/Tagger/scenes/Config.tsx @@ -1,5 +1,5 @@ import { faTimes } from "@fortawesome/free-solid-svg-icons"; -import React, { useRef, useContext } from "react"; +import React, { useContext, useState } from "react"; import { Badge, Button, @@ -14,41 +14,110 @@ import { Icon } from "src/components/Shared/Icon"; import { ParseMode, TagOperation } from "../constants"; import { TaggerStateContext } from "../context"; -interface IConfigProps { - show: boolean; -} - -const Config: React.FC = ({ show }) => { - const { config, setConfig } = useContext(TaggerStateContext); +const Blacklist: React.FC<{ + list: string[]; + setList: (blacklist: string[]) => void; +}> = ({ list, setList }) => { const intl = useIntl(); - const blacklistRef = useRef(null); - function addBlacklistItem() { - if (!blacklistRef.current) return; + const [currentValue, setCurrentValue] = useState(""); + const [error, setError] = useState(); - const input = blacklistRef.current.value; - if (!input) return; + function addBlacklistItem() { + if (!currentValue) return; // don't add duplicate items - if (!config.blacklist.includes(input)) { - setConfig({ - ...config, - blacklist: [...config.blacklist, input], - }); + if (list.includes(currentValue)) { + setError( + intl.formatMessage({ + id: "component_tagger.config.errors.blacklist_duplicate", + }) + ); + return; + } + + // validate regex + try { + new 
RegExp(currentValue); + } catch (e) { + setError((e as SyntaxError).message); + return; } - blacklistRef.current.value = ""; + setList([...list, currentValue]); + + setCurrentValue(""); } function removeBlacklistItem(index: number) { - const newBlacklist = [...config.blacklist]; + const newBlacklist = [...list]; newBlacklist.splice(index, 1); - setConfig({ - ...config, - blacklist: newBlacklist, - }); + setList(newBlacklist); } + return ( +
+
+ +
+ + + { + setCurrentValue(e.currentTarget.value); + setError(undefined); + }} + onKeyDown={(e: React.KeyboardEvent) => { + if (e.key === "Enter") { + addBlacklistItem(); + e.preventDefault(); + } + }} + isInvalid={!!error} + /> + + + + {error} + + +
+ {intl.formatMessage( + { id: "component_tagger.config.blacklist_desc" }, + { chars_require_escape: [\^$.|?*+() } + )} +
+ {list.map((item, index) => ( + + {item.toString()} + + + ))} +
+ ); +}; + +interface IConfigProps { + show: boolean; +} + +const Config: React.FC = ({ show }) => { + const { config, setConfig } = useContext(TaggerStateContext); + const intl = useIntl(); + return ( @@ -198,47 +267,10 @@ const Config: React.FC = ({ show }) => {
-
- -
- - ) => { - if (e.key === "Enter") { - addBlacklistItem(); - e.preventDefault(); - } - }} - /> - - - - -
- {intl.formatMessage( - { id: "component_tagger.config.blacklist_desc" }, - { chars_require_escape: [\^$.|?*+() } - )} -
- {config.blacklist.map((item, index) => ( - - {item.toString()} - - - ))} + setConfig({ ...config, blacklist })} + />
diff --git a/ui/v2.5/src/components/Tagger/utils.ts b/ui/v2.5/src/components/Tagger/utils.ts index 6bac6cb0428..92f5960406d 100644 --- a/ui/v2.5/src/components/Tagger/utils.ts +++ b/ui/v2.5/src/components/Tagger/utils.ts @@ -83,6 +83,17 @@ export function prepareQueryString( mode: ParseMode, blacklist: string[] ) { + const regexs = blacklist + .map((b) => { + try { + return new RegExp(b, "gi"); + } catch { + // ignore + return null; + } + }) + .filter((r) => r !== null) as RegExp[]; + if ((mode === "auto" && scene.date && scene.studio) || mode === "metadata") { let str = [ scene.date, @@ -92,8 +103,8 @@ export function prepareQueryString( ] .filter((s) => s !== "") .join(" "); - blacklist.forEach((b) => { - str = str.replace(new RegExp(b, "gi"), " "); + regexs.forEach((re) => { + str = str.replace(re, " "); }); return str; } @@ -106,8 +117,9 @@ export function prepareQueryString( } else if (mode === "dir" && paths.length) { s = paths[paths.length - 1]; } - blacklist.forEach((b) => { - s = s.replace(new RegExp(b, "gi"), " "); + + regexs.forEach((re) => { + s = s.replace(re, " "); }); s = parseDate(s); return s.replace(/\./g, " ").replace(/ +/g, " "); diff --git a/ui/v2.5/src/locales/en-GB.json b/ui/v2.5/src/locales/en-GB.json index 784579c95b8..143632af005 100644 --- a/ui/v2.5/src/locales/en-GB.json +++ b/ui/v2.5/src/locales/en-GB.json @@ -173,6 +173,9 @@ "active_instance": "Active stash-box instance:", "blacklist_desc": "Blacklist items are excluded from queries. Note that they are regular expressions and also case-insensitive. 
Certain characters must be escaped with a backslash: {chars_require_escape}", "blacklist_label": "Blacklist", + "errors": { + "blacklist_duplicate": "Duplicate blacklist item" + }, "mark_organized_desc": "Immediately mark the scene as Organized after the Save button is clicked.", "mark_organized_label": "Mark as Organized on save", "query_mode_auto": "Auto", From ef2231f97b5103b687b24f48a28587839b4aae85 Mon Sep 17 00:00:00 2001 From: DogmaDragon <103123951+DogmaDragon@users.noreply.github.com> Date: Thu, 21 Nov 2024 23:28:09 +0200 Subject: [PATCH 04/12] Update tripwire link (#5493) --- internal/api/authentication.go | 4 ++-- internal/manager/config/config.go | 2 +- pkg/session/authentication.go | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/internal/api/authentication.go b/internal/api/authentication.go index 00ef58e5ea5..03b446d868d 100644 --- a/internal/api/authentication.go +++ b/internal/api/authentication.go @@ -16,12 +16,12 @@ import ( const ( tripwireActivatedErrMsg = "Stash is exposed to the public internet without authentication, and is not serving any more content to protect your privacy. " + - "More information and fixes are available at https://docs.stashapp.cc/networking/authentication-required-when-accessing-stash-from-the-internet" + "More information and fixes are available at https://docs.stashapp.cc/faq/setup/#protecting-against-accidental-exposure-to-the-internet" externalAccessErrMsg = "You have attempted to access Stash over the internet, and authentication is not enabled. " + "This is extremely dangerous! The whole world can see your your stash page and browse your files! " + "Stash is not answering any other requests to protect your privacy. 
" + - "Please read the log entry or visit https://docs.stashapp.cc/networking/authentication-required-when-accessing-stash-from-the-internet" + "Please read the log entry or visit https://docs.stashapp.cc/faq/setup/#protecting-against-accidental-exposure-to-the-internet" ) func allowUnauthenticated(r *http.Request) bool { diff --git a/internal/manager/config/config.go b/internal/manager/config/config.go index 6a568c1da0b..aa7999c5386 100644 --- a/internal/manager/config/config.go +++ b/internal/manager/config/config.go @@ -1533,7 +1533,7 @@ func (i *Config) GetDefaultGenerateSettings() *models.GenerateMetadataOptions { } // GetDangerousAllowPublicWithoutAuth determines if the security feature is enabled. -// See https://docs.stashapp.cc/networking/authentication-required-when-accessing-stash-from-the-internet +// See https://docs.stashapp.cc/faq/setup/#protecting-against-accidental-exposure-to-the-internet func (i *Config) GetDangerousAllowPublicWithoutAuth() bool { return i.getBool(dangerousAllowPublicWithoutAuth) } diff --git a/pkg/session/authentication.go b/pkg/session/authentication.go index d9a02314b80..b0d1b4f5046 100644 --- a/pkg/session/authentication.go +++ b/pkg/session/authentication.go @@ -81,6 +81,6 @@ func LogExternalAccessError(err ExternalAccessError) { "You probably forwarded a port from your router. At the very least, add a password to stash in the settings. \n"+ "Stash will not serve requests until you edit config.yml, remove the security_tripwire_accessed_from_public_internet key and restart stash. \n"+ "This behaviour can be overridden (but not recommended) by setting dangerous_allow_public_without_auth to true in config.yml. 
\n"+ - "More information is available at https://docs.stashapp.cc/networking/authentication-required-when-accessing-stash-from-the-internet \n"+ + "More information is available at https://docs.stashapp.cc/faq/setup/#protecting-against-accidental-exposure-to-the-internet \n"+ "Stash is not answering any other requests to protect your privacy.", net.IP(err).String()) } From 3c81d3b154ff25971b450f4bcab610a5f821bc0b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 28 Nov 2024 09:28:16 +1100 Subject: [PATCH 05/12] Bump cross-spawn from 7.0.3 to 7.0.6 in /ui/v2.5 (#5486) Bumps [cross-spawn](https://github.com/moxystudio/node-cross-spawn) from 7.0.3 to 7.0.6. - [Changelog](https://github.com/moxystudio/node-cross-spawn/blob/master/CHANGELOG.md) - [Commits](https://github.com/moxystudio/node-cross-spawn/compare/v7.0.3...v7.0.6) --- updated-dependencies: - dependency-name: cross-spawn dependency-type: indirect ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- ui/v2.5/yarn.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ui/v2.5/yarn.lock b/ui/v2.5/yarn.lock index 733e10d2565..aa27a347444 100644 --- a/ui/v2.5/yarn.lock +++ b/ui/v2.5/yarn.lock @@ -3498,9 +3498,9 @@ cross-inspect@1.0.0: tslib "^2.4.0" cross-spawn@^7.0.2: - version "7.0.3" - resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" - integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== + version "7.0.6" + resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.6.tgz#8a58fe78f00dcd70c370451759dfbfaf03e8ee9f" + integrity sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA== dependencies: path-key "^3.1.0" shebang-command "^2.0.0" From e097f2b3f45fa4b9ca3101cad07f76282fd04fef Mon Sep 17 
00:00:00 2001 From: dogwithakeyboard <128322708+dogwithakeyboard@users.noreply.github.com> Date: Wed, 27 Nov 2024 22:31:37 +0000 Subject: [PATCH 06/12] Tagger preview scrubber and thumbnail (#5507) --- .../components/Tagger/scenes/SceneTagger.tsx | 2 ++ .../components/Tagger/scenes/TaggerScene.tsx | 27 ++++++++++++++++++- ui/v2.5/src/components/Tagger/styles.scss | 11 +++----- 3 files changed, 32 insertions(+), 8 deletions(-) diff --git a/ui/v2.5/src/components/Tagger/scenes/SceneTagger.tsx b/ui/v2.5/src/components/Tagger/scenes/SceneTagger.tsx index ab6bd226e33..67ea5ea5d06 100755 --- a/ui/v2.5/src/components/Tagger/scenes/SceneTagger.tsx +++ b/ui/v2.5/src/components/Tagger/scenes/SceneTagger.tsx @@ -69,6 +69,8 @@ const Scene: React.FC<{ : undefined } showLightboxImage={showLightboxImage} + queue={queue} + index={index} > {searchResult && searchResult.results?.length ? ( diff --git a/ui/v2.5/src/components/Tagger/scenes/TaggerScene.tsx b/ui/v2.5/src/components/Tagger/scenes/TaggerScene.tsx index 01cad14787a..a7156318f8a 100644 --- a/ui/v2.5/src/components/Tagger/scenes/TaggerScene.tsx +++ b/ui/v2.5/src/components/Tagger/scenes/TaggerScene.tsx @@ -1,6 +1,6 @@ import React, { useState, useContext, PropsWithChildren, useMemo } from "react"; import * as GQL from "src/core/generated-graphql"; -import { Link } from "react-router-dom"; +import { Link, useHistory } from "react-router-dom"; import { Button, Collapse, Form, InputGroup } from "react-bootstrap"; import { FormattedMessage } from "react-intl"; @@ -19,6 +19,8 @@ import { } from "@fortawesome/free-solid-svg-icons"; import { objectPath, objectTitle } from "src/core/files"; import { ExternalLink } from "src/components/Shared/ExternalLink"; +import { ConfigurationContext } from "src/hooks/Config"; +import { SceneQueue } from "src/models/sceneQueue"; interface ITaggerSceneDetails { scene: GQL.SlimSceneDataFragment; @@ -91,6 +93,8 @@ interface ITaggerScene { scrapeSceneFragment?: (scene: GQL.SlimSceneDataFragment) => 
void; loading?: boolean; showLightboxImage: (imagePath: string) => void; + queue?: SceneQueue; + index?: number; } export const TaggerScene: React.FC> = ({ @@ -102,6 +106,8 @@ export const TaggerScene: React.FC> = ({ errorMessage, children, showLightboxImage, + queue, + index, }) => { const { config } = useContext(TaggerStateContext); const [queryString, setQueryString] = useState(""); @@ -125,6 +131,11 @@ export const TaggerScene: React.FC> = ({ const height = file?.height ? file.height : 0; const isPortrait = height > width; + const history = useHistory(); + + const { configuration } = React.useContext(ConfigurationContext); + const cont = configuration?.interface.continuePlaylistDefault ?? false; + async function query() { if (!doSceneQuery) return; @@ -213,6 +224,18 @@ export const TaggerScene: React.FC> = ({ } } + function onScrubberClick(timestamp: number) { + const link = queue + ? queue.makeLink(scene.id, { + sceneIndex: index, + continue: cont, + start: timestamp, + }) + : `/scenes/${scene.id}?t=${timestamp}`; + + history.push(link); + } + return (
@@ -224,6 +247,8 @@ export const TaggerScene: React.FC> = ({ video={scene.paths.preview ?? undefined} isPortrait={isPortrait} soundActive={false} + vttPath={scene.paths.vtt ?? undefined} + onScrubberClick={onScrubberClick} /> {maybeRenderSpriteIcon()} diff --git a/ui/v2.5/src/components/Tagger/styles.scss b/ui/v2.5/src/components/Tagger/styles.scss index 5fcff5baf02..b7fd576bf0d 100644 --- a/ui/v2.5/src/components/Tagger/styles.scss +++ b/ui/v2.5/src/components/Tagger/styles.scss @@ -12,22 +12,19 @@ .scene-card-preview { border-radius: 3px; + color: $text-color; + height: 100px; margin-bottom: 0; - max-height: 100px; overflow: hidden; - width: 150px; - - &-video { - background-color: #495b68; - } + width: auto; } .sprite-button { - bottom: 5px; filter: drop-shadow(1px 1px 1px #222); padding: 0; position: absolute; right: 5px; + top: 5px; } .sub-content { From 6ad09518788c9a38725a5ac74249fffa69ebc341 Mon Sep 17 00:00:00 2001 From: dogwithakeyboard <128322708+dogwithakeyboard@users.noreply.github.com> Date: Fri, 29 Nov 2024 05:28:10 +0000 Subject: [PATCH 07/12] Scene Marker duration filter and sort (#5472) Co-authored-by: WithoutPants <53250216+WithoutPants@users.noreply.github.com> --- graphql/schema/types/filters.graphql | 2 + pkg/models/scene_marker.go | 2 + pkg/sqlite/scene_marker.go | 4 + pkg/sqlite/scene_marker_filter.go | 1 + pkg/sqlite/scene_marker_test.go | 110 ++++++++++++++++++ pkg/sqlite/setup_test.go | 12 ++ .../models/list-filter/criteria/criterion.ts | 23 +++- .../src/models/list-filter/scene-markers.ts | 3 + 8 files changed, 155 insertions(+), 2 deletions(-) diff --git a/graphql/schema/types/filters.graphql b/graphql/schema/types/filters.graphql index f0f84efda8c..23396a98ffd 100644 --- a/graphql/schema/types/filters.graphql +++ b/graphql/schema/types/filters.graphql @@ -193,6 +193,8 @@ input SceneMarkerFilterType { performers: MultiCriterionInput "Filter to only include scene markers from these scenes" scenes: MultiCriterionInput + "Filter by 
duration (in seconds)" + duration: FloatCriterionInput "Filter by creation time" created_at: TimestampCriterionInput "Filter by last update time" diff --git a/pkg/models/scene_marker.go b/pkg/models/scene_marker.go index 8c4598a6df4..82f9faa1918 100644 --- a/pkg/models/scene_marker.go +++ b/pkg/models/scene_marker.go @@ -11,6 +11,8 @@ type SceneMarkerFilterType struct { Performers *MultiCriterionInput `json:"performers"` // Filter to only include scene markers from these scenes Scenes *MultiCriterionInput `json:"scenes"` + // Filter by duration (in seconds) + Duration *FloatCriterionInput `json:"duration"` // Filter by created at CreatedAt *TimestampCriterionInput `json:"created_at"` // Filter by updated at diff --git a/pkg/sqlite/scene_marker.go b/pkg/sqlite/scene_marker.go index 8b2306eab4b..ed98d0ef74a 100644 --- a/pkg/sqlite/scene_marker.go +++ b/pkg/sqlite/scene_marker.go @@ -367,6 +367,7 @@ var sceneMarkerSortOptions = sortOptions{ "scenes_updated_at", "seconds", "updated_at", + "duration", } func (qb *SceneMarkerStore) setSceneMarkerSort(query *queryBuilder, findFilter *models.FindFilterType) error { @@ -386,6 +387,9 @@ func (qb *SceneMarkerStore) setSceneMarkerSort(query *queryBuilder, findFilter * case "title": query.join(tagTable, "", "scene_markers.primary_tag_id = tags.id") query.sortAndPagination += " ORDER BY COALESCE(NULLIF(scene_markers.title,''), tags.name) COLLATE NATURAL_CI " + direction + case "duration": + sort = "(scene_markers.end_seconds - scene_markers.seconds)" + query.sortAndPagination += getSort(sort, direction, sceneMarkerTable) default: query.sortAndPagination += getSort(sort, direction, sceneMarkerTable) } diff --git a/pkg/sqlite/scene_marker_filter.go b/pkg/sqlite/scene_marker_filter.go index d5e044e85a7..34fa0f39b36 100644 --- a/pkg/sqlite/scene_marker_filter.go +++ b/pkg/sqlite/scene_marker_filter.go @@ -41,6 +41,7 @@ func (qb *sceneMarkerFilterHandler) criterionHandler() criterionHandler { 
qb.sceneTagsCriterionHandler(sceneMarkerFilter.SceneTags), qb.performersCriterionHandler(sceneMarkerFilter.Performers), qb.scenesCriterionHandler(sceneMarkerFilter.Scenes), + floatCriterionHandler(sceneMarkerFilter.Duration, "COALESCE(scene_markers.end_seconds - scene_markers.seconds, NULL)", nil), ×tampCriterionHandler{sceneMarkerFilter.CreatedAt, "scene_markers.created_at", nil}, ×tampCriterionHandler{sceneMarkerFilter.UpdatedAt, "scene_markers.updated_at", nil}, &dateCriterionHandler{sceneMarkerFilter.SceneDate, "scenes.date", qb.joinScenes}, diff --git a/pkg/sqlite/scene_marker_test.go b/pkg/sqlite/scene_marker_test.go index ce8f4d3ad6b..64893b3a67f 100644 --- a/pkg/sqlite/scene_marker_test.go +++ b/pkg/sqlite/scene_marker_test.go @@ -391,6 +391,116 @@ func TestMarkerQuerySceneTags(t *testing.T) { }) } +func markersToIDs(i []*models.SceneMarker) []int { + ret := make([]int, len(i)) + for i, v := range i { + ret[i] = v.ID + } + + return ret +} + +func TestMarkerQueryDuration(t *testing.T) { + type test struct { + name string + markerFilter *models.SceneMarkerFilterType + include []int + exclude []int + } + + cases := []test{ + { + "is null", + &models.SceneMarkerFilterType{ + Duration: &models.FloatCriterionInput{ + Modifier: models.CriterionModifierIsNull, + }, + }, + []int{markerIdxWithScene}, + []int{markerIdxWithDuration}, + }, + { + "not null", + &models.SceneMarkerFilterType{ + Duration: &models.FloatCriterionInput{ + Modifier: models.CriterionModifierNotNull, + }, + }, + []int{markerIdxWithDuration}, + []int{markerIdxWithScene}, + }, + { + "equals", + &models.SceneMarkerFilterType{ + Duration: &models.FloatCriterionInput{ + Modifier: models.CriterionModifierEquals, + Value: markerIdxWithDuration, + }, + }, + []int{markerIdxWithDuration}, + []int{markerIdx2WithDuration, markerIdxWithScene}, + }, + { + "not equals", + &models.SceneMarkerFilterType{ + Duration: &models.FloatCriterionInput{ + Modifier: models.CriterionModifierNotEquals, + Value: 
markerIdx2WithDuration, + }, + }, + []int{markerIdxWithDuration}, + []int{markerIdx2WithDuration, markerIdxWithScene}, + }, + { + "greater than", + &models.SceneMarkerFilterType{ + Duration: &models.FloatCriterionInput{ + Modifier: models.CriterionModifierGreaterThan, + Value: markerIdxWithDuration, + }, + }, + []int{markerIdx2WithDuration}, + []int{markerIdxWithDuration, markerIdxWithScene}, + }, + { + "less than", + &models.SceneMarkerFilterType{ + Duration: &models.FloatCriterionInput{ + Modifier: models.CriterionModifierLessThan, + Value: markerIdx2WithDuration, + }, + }, + []int{markerIdxWithDuration}, + []int{markerIdx2WithDuration, markerIdxWithScene}, + }, + } + + qb := db.SceneMarker + + for _, tt := range cases { + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + assert := assert.New(t) + got, _, err := qb.Query(ctx, tt.markerFilter, nil) + if err != nil { + t.Errorf("SceneMarkerStore.Query() error = %v", err) + return + } + + ids := markersToIDs(got) + include := indexesToIDs(markerIDs, tt.include) + exclude := indexesToIDs(markerIDs, tt.exclude) + + for _, i := range include { + assert.Contains(ids, i) + } + for _, e := range exclude { + assert.NotContains(ids, e) + } + }) + } + +} + func queryMarkers(ctx context.Context, t *testing.T, sqb models.SceneMarkerReader, markerFilter *models.SceneMarkerFilterType, findFilter *models.FindFilterType) []*models.SceneMarker { t.Helper() result, _, err := sqb.Query(ctx, markerFilter, findFilter) diff --git a/pkg/sqlite/setup_test.go b/pkg/sqlite/setup_test.go index 1c3f914d3ba..b63b6a04a2c 100644 --- a/pkg/sqlite/setup_test.go +++ b/pkg/sqlite/setup_test.go @@ -276,6 +276,8 @@ const ( markerIdxWithScene = iota markerIdxWithTag markerIdxWithSceneTag + markerIdxWithDuration + markerIdx2WithDuration totalMarkers ) @@ -1754,10 +1756,20 @@ func createStudios(ctx context.Context, n int, o int) error { return nil } +func getMarkerEndSeconds(index int) *float64 { + if index != 
markerIdxWithDuration && index != markerIdx2WithDuration { + return nil + } + ret := float64(index) + return &ret +} + func createMarker(ctx context.Context, mqb models.SceneMarkerReaderWriter, markerSpec markerSpec) error { + markerIdx := len(markerIDs) marker := models.SceneMarker{ SceneID: sceneIDs[markerSpec.sceneIdx], PrimaryTagID: tagIDs[markerSpec.primaryTagIdx], + EndSeconds: getMarkerEndSeconds(markerIdx), } err := mqb.Create(ctx, &marker) diff --git a/ui/v2.5/src/models/list-filter/criteria/criterion.ts b/ui/v2.5/src/models/list-filter/criteria/criterion.ts index d950073be4c..4fbf7c03b68 100644 --- a/ui/v2.5/src/models/list-filter/criteria/criterion.ts +++ b/ui/v2.5/src/models/list-filter/criteria/criterion.ts @@ -637,7 +637,11 @@ export function createNumberCriterionOption( } export class NullNumberCriterionOption extends CriterionOption { - constructor(messageID: string, value: CriterionType) { + constructor( + messageID: string, + value: CriterionType, + makeCriterion?: () => Criterion + ) { super({ messageID, type: value, @@ -653,7 +657,9 @@ export class NullNumberCriterionOption extends CriterionOption { ], defaultModifier: CriterionModifier.Equals, inputType: "number", - makeCriterion: () => new NumberCriterion(this), + makeCriterion: makeCriterion + ? makeCriterion + : () => new NumberCriterion(this), }); } } @@ -780,6 +786,19 @@ export function createDurationCriterionOption( return new DurationCriterionOption(messageID ?? value, value); } +export class NullDurationCriterionOption extends NullNumberCriterionOption { + constructor(messageID: string, value: CriterionType) { + super(messageID, value, () => new DurationCriterion(this)); + } +} + +export function createNullDurationCriterionOption( + value: CriterionType, + messageID?: string +) { + return new NullDurationCriterionOption(messageID ?? 
value, value); +} + export class DurationCriterion extends Criterion { constructor(type: CriterionOption) { super(type, { value: undefined, value2: undefined }); diff --git a/ui/v2.5/src/models/list-filter/scene-markers.ts b/ui/v2.5/src/models/list-filter/scene-markers.ts index 7f6e555ccf3..a70cd16291e 100644 --- a/ui/v2.5/src/models/list-filter/scene-markers.ts +++ b/ui/v2.5/src/models/list-filter/scene-markers.ts @@ -6,10 +6,12 @@ import { DisplayMode } from "./types"; import { createDateCriterionOption, createMandatoryTimestampCriterionOption, + createNullDurationCriterionOption, } from "./criteria/criterion"; const defaultSortBy = "title"; const sortByOptions = [ + "duration", "title", "seconds", "scene_id", @@ -22,6 +24,7 @@ const criterionOptions = [ MarkersScenesCriterionOption, SceneTagsCriterionOption, PerformersCriterionOption, + createNullDurationCriterionOption("duration"), createMandatoryTimestampCriterionOption("created_at"), createMandatoryTimestampCriterionOption("updated_at"), createDateCriterionOption("scene_date"), From 7f8349469a73a7b8215acdd1f8a142d6a65cbe67 Mon Sep 17 00:00:00 2001 From: dogwithakeyboard <128322708+dogwithakeyboard@users.noreply.github.com> Date: Fri, 29 Nov 2024 06:02:20 +0000 Subject: [PATCH 08/12] Scene Marker grid view (#5443) * add bulk delete mutation --------- Co-authored-by: WithoutPants <53250216+WithoutPants@users.noreply.github.com> --- graphql/schema/schema.graphql | 1 + internal/api/resolver_mutation_scene.go | 51 +++-- ui/v2.5/graphql/data/scene-marker.graphql | 17 +- .../graphql/mutations/scene-marker.graphql | 4 + .../Scenes/DeleteSceneMarkersDialog.tsx | 83 +++++++ .../src/components/Scenes/PreviewScrubber.tsx | 2 +- .../src/components/Scenes/SceneMarkerCard.tsx | 214 ++++++++++++++++++ .../Scenes/SceneMarkerCardsGrid.tsx | 38 ++++ .../src/components/Scenes/SceneMarkerList.tsx | 33 ++- ui/v2.5/src/components/Scenes/styles.scss | 4 +- .../Settings/Tasks/GenerateOptions.tsx | 2 - 
ui/v2.5/src/components/Shared/TagLink.tsx | 9 +- ui/v2.5/src/core/StashService.ts | 18 ++ ui/v2.5/src/locales/en-GB.json | 2 +- .../src/models/list-filter/scene-markers.ts | 2 +- ui/v2.5/src/utils/navigation.ts | 27 +++ 16 files changed, 479 insertions(+), 28 deletions(-) create mode 100644 ui/v2.5/src/components/Scenes/DeleteSceneMarkersDialog.tsx create mode 100644 ui/v2.5/src/components/Scenes/SceneMarkerCard.tsx create mode 100644 ui/v2.5/src/components/Scenes/SceneMarkerCardsGrid.tsx diff --git a/graphql/schema/schema.graphql b/graphql/schema/schema.graphql index 251c2af838c..31218577df3 100644 --- a/graphql/schema/schema.graphql +++ b/graphql/schema/schema.graphql @@ -300,6 +300,7 @@ type Mutation { sceneMarkerCreate(input: SceneMarkerCreateInput!): SceneMarker sceneMarkerUpdate(input: SceneMarkerUpdateInput!): SceneMarker sceneMarkerDestroy(id: ID!): Boolean! + sceneMarkersDestroy(ids: [ID!]!): Boolean! sceneAssignFile(input: AssignSceneFileInput!): Boolean! diff --git a/internal/api/resolver_mutation_scene.go b/internal/api/resolver_mutation_scene.go index 101cc8ba5e5..644732be94e 100644 --- a/internal/api/resolver_mutation_scene.go +++ b/internal/api/resolver_mutation_scene.go @@ -814,11 +814,16 @@ func (r *mutationResolver) SceneMarkerUpdate(ctx context.Context, input SceneMar } func (r *mutationResolver) SceneMarkerDestroy(ctx context.Context, id string) (bool, error) { - markerID, err := strconv.Atoi(id) + return r.SceneMarkersDestroy(ctx, []string{id}) +} + +func (r *mutationResolver) SceneMarkersDestroy(ctx context.Context, markerIDs []string) (bool, error) { + ids, err := stringslice.StringSliceToIntSlice(markerIDs) if err != nil { - return false, fmt.Errorf("converting id: %w", err) + return false, fmt.Errorf("converting ids: %w", err) } + var markers []*models.SceneMarker fileNamingAlgo := manager.GetInstance().Config.GetVideoFileNamingAlgorithm() fileDeleter := &scene.FileDeleter{ @@ -831,35 +836,45 @@ func (r *mutationResolver) 
SceneMarkerDestroy(ctx context.Context, id string) (b qb := r.repository.SceneMarker sqb := r.repository.Scene - marker, err := qb.Find(ctx, markerID) + for _, markerID := range ids { + marker, err := qb.Find(ctx, markerID) - if err != nil { - return err - } + if err != nil { + return err + } - if marker == nil { - return fmt.Errorf("scene marker with id %d not found", markerID) - } + if marker == nil { + return fmt.Errorf("scene marker with id %d not found", markerID) + } - s, err := sqb.Find(ctx, marker.SceneID) - if err != nil { - return err - } + s, err := sqb.Find(ctx, marker.SceneID) - if s == nil { - return fmt.Errorf("scene with id %d not found", marker.SceneID) + if err != nil { + return err + } + + if s == nil { + return fmt.Errorf("scene with id %d not found", marker.SceneID) + } + + markers = append(markers, marker) + + if err := scene.DestroyMarker(ctx, s, marker, qb, fileDeleter); err != nil { + return err + } } - return scene.DestroyMarker(ctx, s, marker, qb, fileDeleter) + return nil }); err != nil { fileDeleter.Rollback() return false, err } - // perform the post-commit actions fileDeleter.Commit() - r.hookExecutor.ExecutePostHooks(ctx, markerID, hook.SceneMarkerDestroyPost, id, nil) + for _, marker := range markers { + r.hookExecutor.ExecutePostHooks(ctx, marker.ID, hook.SceneMarkerDestroyPost, markerIDs, nil) + } return true, nil } diff --git a/ui/v2.5/graphql/data/scene-marker.graphql b/ui/v2.5/graphql/data/scene-marker.graphql index e2ebfc4df34..a5dbc8a6c7c 100644 --- a/ui/v2.5/graphql/data/scene-marker.graphql +++ b/ui/v2.5/graphql/data/scene-marker.graphql @@ -8,7 +8,7 @@ fragment SceneMarkerData on SceneMarker { screenshot scene { - id + ...SceneMarkerSceneData } primary_tag { @@ -21,3 +21,18 @@ fragment SceneMarkerData on SceneMarker { name } } + +fragment SceneMarkerSceneData on Scene { + id + title + files { + width + height + path + } + performers { + id + name + image_path + } +} diff --git 
a/ui/v2.5/graphql/mutations/scene-marker.graphql b/ui/v2.5/graphql/mutations/scene-marker.graphql index 3b1de35c7b2..766e318fc6a 100644 --- a/ui/v2.5/graphql/mutations/scene-marker.graphql +++ b/ui/v2.5/graphql/mutations/scene-marker.graphql @@ -47,3 +47,7 @@ mutation SceneMarkerUpdate( mutation SceneMarkerDestroy($id: ID!) { sceneMarkerDestroy(id: $id) } + +mutation SceneMarkersDestroy($ids: [ID!]!) { + sceneMarkersDestroy(ids: $ids) +} diff --git a/ui/v2.5/src/components/Scenes/DeleteSceneMarkersDialog.tsx b/ui/v2.5/src/components/Scenes/DeleteSceneMarkersDialog.tsx new file mode 100644 index 00000000000..01d0722261b --- /dev/null +++ b/ui/v2.5/src/components/Scenes/DeleteSceneMarkersDialog.tsx @@ -0,0 +1,83 @@ +import React, { useState } from "react"; +import { useSceneMarkersDestroy } from "src/core/StashService"; +import * as GQL from "src/core/generated-graphql"; +import { ModalComponent } from "src/components/Shared/Modal"; +import { useToast } from "src/hooks/Toast"; +import { useIntl } from "react-intl"; +import { faTrashAlt } from "@fortawesome/free-solid-svg-icons"; + +interface IDeleteSceneMarkersDialogProps { + selected: GQL.SceneMarkerDataFragment[]; + onClose: (confirmed: boolean) => void; +} + +export const DeleteSceneMarkersDialog: React.FC< + IDeleteSceneMarkersDialogProps +> = (props: IDeleteSceneMarkersDialogProps) => { + const intl = useIntl(); + const singularEntity = intl.formatMessage({ id: "marker" }); + const pluralEntity = intl.formatMessage({ id: "markers" }); + + const header = intl.formatMessage( + { id: "dialogs.delete_object_title" }, + { count: props.selected.length, singularEntity, pluralEntity } + ); + const toastMessage = intl.formatMessage( + { id: "toast.delete_past_tense" }, + { count: props.selected.length, singularEntity, pluralEntity } + ); + const message = intl.formatMessage( + { id: "dialogs.delete_object_desc" }, + { count: props.selected.length, singularEntity, pluralEntity } + ); + + const Toast = useToast(); + const 
[deleteSceneMarkers] = useSceneMarkersDestroy( + getSceneMarkersDeleteInput() + ); + + // Network state + const [isDeleting, setIsDeleting] = useState(false); + + function getSceneMarkersDeleteInput(): GQL.SceneMarkersDestroyMutationVariables { + return { + ids: props.selected.map((marker) => marker.id), + }; + } + + async function onDelete() { + setIsDeleting(true); + try { + await deleteSceneMarkers(); + Toast.success(toastMessage); + props.onClose(true); + } catch (e) { + Toast.error(e); + props.onClose(false); + } + setIsDeleting(false); + } + + return ( + props.onClose(false), + text: intl.formatMessage({ id: "actions.cancel" }), + variant: "secondary", + }} + isRunning={isDeleting} + > +

{message}

+
+ ); +}; + +export default DeleteSceneMarkersDialog; diff --git a/ui/v2.5/src/components/Scenes/PreviewScrubber.tsx b/ui/v2.5/src/components/Scenes/PreviewScrubber.tsx index eb8f2c10425..143daca4f96 100644 --- a/ui/v2.5/src/components/Scenes/PreviewScrubber.tsx +++ b/ui/v2.5/src/components/Scenes/PreviewScrubber.tsx @@ -94,7 +94,7 @@ export const PreviewScrubber: React.FC = ({ onClick(s.start); } - if (spriteInfo === null) return null; + if (spriteInfo === null || !vttPath) return null; return (
diff --git a/ui/v2.5/src/components/Scenes/SceneMarkerCard.tsx b/ui/v2.5/src/components/Scenes/SceneMarkerCard.tsx new file mode 100644 index 00000000000..b18e1fa544a --- /dev/null +++ b/ui/v2.5/src/components/Scenes/SceneMarkerCard.tsx @@ -0,0 +1,214 @@ +import React, { useEffect, useMemo, useState } from "react"; +import { Button, ButtonGroup } from "react-bootstrap"; +import * as GQL from "src/core/generated-graphql"; +import { Icon } from "../Shared/Icon"; +import { TagLink } from "../Shared/TagLink"; +import { HoverPopover } from "../Shared/HoverPopover"; +import NavUtils from "src/utils/navigation"; +import TextUtils from "src/utils/text"; +import { ConfigurationContext } from "src/hooks/Config"; +import { GridCard, calculateCardWidth } from "../Shared/GridCard/GridCard"; +import { faTag } from "@fortawesome/free-solid-svg-icons"; +import ScreenUtils from "src/utils/screen"; +import { markerTitle } from "src/core/markers"; +import { Link } from "react-router-dom"; +import { objectTitle } from "src/core/files"; +import { PerformerPopoverButton } from "../Shared/PerformerPopoverButton"; +import { ScenePreview } from "./SceneCard"; +import { TruncatedText } from "../Shared/TruncatedText"; + +interface ISceneMarkerCardProps { + marker: GQL.SceneMarkerDataFragment; + containerWidth?: number; + previewHeight?: number; + index?: number; + compact?: boolean; + selecting?: boolean; + selected?: boolean | undefined; + zoomIndex?: number; + onSelectedChanged?: (selected: boolean, shiftKey: boolean) => void; +} + +const SceneMarkerCardPopovers = (props: ISceneMarkerCardProps) => { + function maybeRenderPerformerPopoverButton() { + if (props.marker.scene.performers.length <= 0) return; + + return ( + + ); + } + + function renderTagPopoverButton() { + const popoverContent = [ + , + ]; + + props.marker.tags.map((tag) => + popoverContent.push( + + ) + ); + + return ( + + + + ); + } + + function renderPopoverButtonGroup() { + if (!props.compact) { + return ( + <> +
+ + {maybeRenderPerformerPopoverButton()} + {renderTagPopoverButton()} + + + ); + } + } + + return <>{renderPopoverButtonGroup()}; +}; + +const SceneMarkerCardDetails = (props: ISceneMarkerCardProps) => { + return ( +
+ + {TextUtils.formatTimestampRange( + props.marker.seconds, + props.marker.end_seconds ?? undefined + )} + + + {objectTitle(props.marker.scene)} + + } + /> +
+ ); +}; + +const SceneMarkerCardImage = (props: ISceneMarkerCardProps) => { + const { configuration } = React.useContext(ConfigurationContext); + + const file = useMemo( + () => + props.marker.scene.files.length > 0 + ? props.marker.scene.files[0] + : undefined, + [props.marker.scene] + ); + + function isPortrait() { + const width = file?.width ? file.width : 0; + const height = file?.height ? file.height : 0; + return height > width; + } + + function maybeRenderSceneSpecsOverlay() { + return ( +
+ {props.marker.end_seconds && ( + + {TextUtils.secondsToTimestamp( + props.marker.end_seconds - props.marker.seconds + )} + + )} +
+ ); + } + + return ( + <> + + {maybeRenderSceneSpecsOverlay()} + + ); +}; + +export const SceneMarkerCard = (props: ISceneMarkerCardProps) => { + const [cardWidth, setCardWidth] = useState(); + + function zoomIndex() { + if (!props.compact && props.zoomIndex !== undefined) { + return `zoom-${props.zoomIndex}`; + } + + return ""; + } + + useEffect(() => { + if ( + !props.containerWidth || + props.zoomIndex === undefined || + ScreenUtils.isMobile() + ) + return; + + let zoomValue = props.zoomIndex; + let preferredCardWidth: number; + switch (zoomValue) { + case 0: + preferredCardWidth = 240; + break; + case 1: + preferredCardWidth = 340; // this value is intentionally higher than 320 + break; + case 2: + preferredCardWidth = 480; + break; + case 3: + preferredCardWidth = 640; + } + let fittedCardWidth = calculateCardWidth( + props.containerWidth, + preferredCardWidth! + ); + setCardWidth(fittedCardWidth); + }, [props, props.containerWidth, props.zoomIndex]); + + return ( + } + details={} + popovers={} + selected={props.selected} + selecting={props.selecting} + onSelectedChanged={props.onSelectedChanged} + /> + ); +}; diff --git a/ui/v2.5/src/components/Scenes/SceneMarkerCardsGrid.tsx b/ui/v2.5/src/components/Scenes/SceneMarkerCardsGrid.tsx new file mode 100644 index 00000000000..6532f535a97 --- /dev/null +++ b/ui/v2.5/src/components/Scenes/SceneMarkerCardsGrid.tsx @@ -0,0 +1,38 @@ +import React from "react"; +import * as GQL from "src/core/generated-graphql"; +import { SceneMarkerCard } from "./SceneMarkerCard"; +import { useContainerDimensions } from "../Shared/GridCard/GridCard"; + +interface ISceneMarkerCardsGrid { + markers: GQL.SceneMarkerDataFragment[]; + selectedIds: Set; + zoomIndex: number; + onSelectChange: (id: string, selected: boolean, shiftKey: boolean) => void; +} + +export const SceneMarkerCardsGrid: React.FC = ({ + markers, + selectedIds, + zoomIndex, + onSelectChange, +}) => { + const [componentRef, { width }] = useContainerDimensions(); + return ( 
+
+ {markers.map((marker, index) => ( + 0} + selected={selectedIds.has(marker.id)} + onSelectedChanged={(selected: boolean, shiftKey: boolean) => + onSelectChange(marker.id, selected, shiftKey) + } + /> + ))} +
+ ); +}; diff --git a/ui/v2.5/src/components/Scenes/SceneMarkerList.tsx b/ui/v2.5/src/components/Scenes/SceneMarkerList.tsx index 2bf7ae8dba5..33ae79558bd 100644 --- a/ui/v2.5/src/components/Scenes/SceneMarkerList.tsx +++ b/ui/v2.5/src/components/Scenes/SceneMarkerList.tsx @@ -14,6 +14,8 @@ import { ListFilterModel } from "src/models/list-filter/filter"; import { DisplayMode } from "src/models/list-filter/types"; import { MarkerWallPanel } from "../Wall/WallPanel"; import { View } from "../List/views"; +import { SceneMarkerCardsGrid } from "./SceneMarkerCardsGrid"; +import { DeleteSceneMarkersDialog } from "./DeleteSceneMarkersDialog"; function getItems(result: GQL.FindSceneMarkersQueryResult) { return result?.data?.findSceneMarkers?.scene_markers ?? []; @@ -27,6 +29,7 @@ interface ISceneMarkerList { filterHook?: (filter: ListFilterModel) => ListFilterModel; view?: View; alterQuery?: boolean; + defaultSort?: string; } export const SceneMarkerList: React.FC = ({ @@ -84,7 +87,9 @@ export const SceneMarkerList: React.FC = ({ function renderContent( result: GQL.FindSceneMarkersQueryResult, - filter: ListFilterModel + filter: ListFilterModel, + selectedIds: Set, + onSelectChange: (id: string, selected: boolean, shiftKey: boolean) => void ) { if (!result.data?.findSceneMarkers) return; @@ -93,6 +98,29 @@ export const SceneMarkerList: React.FC = ({ ); } + + if (filter.displayMode === DisplayMode.Grid) { + return ( + + ); + } + } + + function renderDeleteDialog( + selectedSceneMarkers: GQL.SceneMarkerDataFragment[], + onClose: (confirmed: boolean) => void + ) { + return ( + + ); } return ( @@ -104,12 +132,15 @@ export const SceneMarkerList: React.FC = ({ alterQuery={alterQuery} filterHook={filterHook} view={view} + selectable > ); diff --git a/ui/v2.5/src/components/Scenes/styles.scss b/ui/v2.5/src/components/Scenes/styles.scss index ca1d051cd02..bb50236ecb6 100644 --- a/ui/v2.5/src/components/Scenes/styles.scss +++ b/ui/v2.5/src/components/Scenes/styles.scss @@ -215,6 
+215,7 @@ textarea.scene-description { } .scene-card, +.scene-marker-card, .gallery-card { .scene-specs-overlay { transition: opacity 0.5s; @@ -272,7 +273,8 @@ textarea.scene-description { } } -.scene-card.card { +.scene-card.card, +.scene-marker-card.card { overflow: hidden; padding: 0; diff --git a/ui/v2.5/src/components/Settings/Tasks/GenerateOptions.tsx b/ui/v2.5/src/components/Settings/Tasks/GenerateOptions.tsx index c0127b5db33..00d129be749 100644 --- a/ui/v2.5/src/components/Settings/Tasks/GenerateOptions.tsx +++ b/ui/v2.5/src/components/Settings/Tasks/GenerateOptions.tsx @@ -110,9 +110,7 @@ export const GenerateOptions: React.FC = ({ } /> = ({ interface IPerformerLinkProps { performer: INamedObject & { disambiguation?: string | null }; - linkType?: "scene" | "gallery" | "image"; + linkType?: "scene" | "gallery" | "image" | "scene_marker"; className?: string; } @@ -55,6 +55,8 @@ export const PerformerLink: React.FC = ({ return NavUtils.makePerformerGalleriesUrl(performer); case "image": return NavUtils.makePerformerImagesUrl(performer); + case "scene_marker": + return NavUtils.makePerformerSceneMarkersUrl(performer); case "scene": default: return NavUtils.makePerformerScenesUrl(performer); @@ -209,7 +211,8 @@ interface ITagLinkProps { | "details" | "performer" | "group" - | "studio"; + | "studio" + | "scene_marker"; className?: string; hoverPlacement?: Placement; showHierarchyIcon?: boolean; @@ -238,6 +241,8 @@ export const TagLink: React.FC = ({ return NavUtils.makeTagImagesUrl(tag); case "group": return NavUtils.makeTagGroupsUrl(tag); + case "scene_marker": + return NavUtils.makeTagSceneMarkersUrl(tag); case "details": return NavUtils.makeTagUrl(tag.id ?? 
""); } diff --git a/ui/v2.5/src/core/StashService.ts b/ui/v2.5/src/core/StashService.ts index 89500419f53..1d9e344ebd8 100644 --- a/ui/v2.5/src/core/StashService.ts +++ b/ui/v2.5/src/core/StashService.ts @@ -1499,6 +1499,24 @@ export const useSceneMarkerDestroy = () => }, }); +export const useSceneMarkersDestroy = ( + input: GQL.SceneMarkersDestroyMutationVariables +) => + GQL.useSceneMarkersDestroyMutation({ + variables: input, + update(cache, result) { + if (!result.data?.sceneMarkersDestroy) return; + + for (const id of input.ids) { + const obj = { __typename: "SceneMarker", id }; + cache.evict({ id: cache.identify(obj) }); + } + + evictTypeFields(cache, sceneMarkerMutationImpactedTypeFields); + evictQueries(cache, sceneMarkerMutationImpactedQueries); + }, + }); + const galleryMutationImpactedTypeFields = { Scene: ["galleries"], Performer: ["gallery_count", "performer_count"], diff --git a/ui/v2.5/src/locales/en-GB.json b/ui/v2.5/src/locales/en-GB.json index 143632af005..ac477d188a6 100644 --- a/ui/v2.5/src/locales/en-GB.json +++ b/ui/v2.5/src/locales/en-GB.json @@ -939,7 +939,7 @@ "marker_image_previews": "Marker Animated Image Previews", "marker_image_previews_tooltip": "Also generate animated (webp) previews, only required when Scene/Marker Wall Preview Type is set to Animated Image. 
When browsing they use less CPU than the video previews, but are generated in addition to them and are larger files.", "marker_screenshots": "Marker Screenshots", - "marker_screenshots_tooltip": "Marker static JPG images, only required if Preview Type is set to Static Image.", + "marker_screenshots_tooltip": "Marker static JPG images", "markers": "Marker Previews", "markers_tooltip": "20 second videos which begin at the given timecode.", "override_preview_generation_options": "Override Preview Generation Options", diff --git a/ui/v2.5/src/models/list-filter/scene-markers.ts b/ui/v2.5/src/models/list-filter/scene-markers.ts index a70cd16291e..fa7d4e71ae7 100644 --- a/ui/v2.5/src/models/list-filter/scene-markers.ts +++ b/ui/v2.5/src/models/list-filter/scene-markers.ts @@ -18,7 +18,7 @@ const sortByOptions = [ "random", "scenes_updated_at", ].map(ListFilterOptions.createSortBy); -const displayModeOptions = [DisplayMode.Wall]; +const displayModeOptions = [DisplayMode.Grid, DisplayMode.Wall]; const criterionOptions = [ TagsCriterionOption, MarkersScenesCriterionOption, diff --git a/ui/v2.5/src/utils/navigation.ts b/ui/v2.5/src/utils/navigation.ts index 4b4b2bf69a7..f6712fb58fb 100644 --- a/ui/v2.5/src/utils/navigation.ts +++ b/ui/v2.5/src/utils/navigation.ts @@ -30,6 +30,8 @@ import { PhashCriterion } from "src/models/list-filter/criteria/phash"; import { ILabeledId } from "src/models/list-filter/types"; import { IntlShape } from "react-intl"; import { galleryTitle } from "src/core/galleries"; +import { MarkersScenesCriterion } from "src/models/list-filter/criteria/scenes"; +import { objectTitle } from "src/core/files"; function addExtraCriteria( dest: Criterion[], @@ -129,6 +131,20 @@ const makePerformerGroupsUrl = ( return `/groups?${filter.makeQueryParameters()}`; }; +const makePerformerSceneMarkersUrl = ( + performer: Partial +) => { + if (!performer.id) return "#"; + const filter = new ListFilterModel(GQL.FilterMode.SceneMarkers, undefined); + const criterion = new 
PerformersCriterion(); + criterion.value.items = [ + { id: performer.id, label: performer.name || `Performer ${performer.id}` }, + ]; + + filter.criteria.push(criterion); + return `/scenes/markers?${filter.makeQueryParameters()}`; +}; + const makePerformersCountryUrl = ( performer: Partial ) => { @@ -429,6 +445,15 @@ const makeSubGroupsUrl = (group: INamedObject) => { return `/groups?${filter.makeQueryParameters()}`; }; +const makeSceneMarkersSceneUrl = (scene: GQL.SceneMarkerSceneDataFragment) => { + if (!scene.id) return "#"; + const filter = new ListFilterModel(GQL.FilterMode.SceneMarkers, undefined); + const criterion = new MarkersScenesCriterion(); + criterion.value = [{ id: scene.id, label: objectTitle(scene) }]; + filter.criteria.push(criterion); + return `/scenes/markers?${filter.makeQueryParameters()}`; +}; + export function handleUnsavedChanges( intl: IntlShape, basepath: string, @@ -449,6 +474,7 @@ const NavUtils = { makePerformerImagesUrl, makePerformerGalleriesUrl, makePerformerGroupsUrl, + makePerformerSceneMarkersUrl, makePerformersCountryUrl, makeStudioScenesUrl, makeStudioImagesUrl, @@ -477,6 +503,7 @@ const NavUtils = { makeDirectorGroupsUrl, makeContainingGroupsUrl, makeSubGroupsUrl, + makeSceneMarkersSceneUrl, }; export default NavUtils; From 60bb6bf50ba626a77f4bcaf7b761e4082028a82b Mon Sep 17 00:00:00 2001 From: WithoutPants <53250216+WithoutPants@users.noreply.github.com> Date: Tue, 3 Dec 2024 08:02:13 +1100 Subject: [PATCH 09/12] Hide legacy groups criterion option (#5521) --- ui/v2.5/src/components/List/EditFilterDialog.tsx | 12 +++++++----- ui/v2.5/src/models/list-filter/criteria/criterion.ts | 6 ++++++ ui/v2.5/src/models/list-filter/criteria/groups.ts | 1 + 3 files changed, 14 insertions(+), 5 deletions(-) diff --git a/ui/v2.5/src/components/List/EditFilterDialog.tsx b/ui/v2.5/src/components/List/EditFilterDialog.tsx index 169199058ec..43546731f62 100644 --- a/ui/v2.5/src/components/List/EditFilterDialog.tsx +++ 
b/ui/v2.5/src/components/List/EditFilterDialog.tsx @@ -243,11 +243,13 @@ export const EditFilterDialog: React.FC = ({ }, [currentFilter.mode]); const criterionOptions = useMemo(() => { - return [...filterOptions.criterionOptions].sort((a, b) => { - return intl - .formatMessage({ id: a.messageID }) - .localeCompare(intl.formatMessage({ id: b.messageID })); - }); + return [...filterOptions.criterionOptions] + .filter((c) => !c.hidden) + .sort((a, b) => { + return intl + .formatMessage({ id: a.messageID }) + .localeCompare(intl.formatMessage({ id: b.messageID })); + }); }, [intl, filterOptions.criterionOptions]); const optionSelected = useCallback( diff --git a/ui/v2.5/src/models/list-filter/criteria/criterion.ts b/ui/v2.5/src/models/list-filter/criteria/criterion.ts index 4fbf7c03b68..b55e3c1f73e 100644 --- a/ui/v2.5/src/models/list-filter/criteria/criterion.ts +++ b/ui/v2.5/src/models/list-filter/criteria/criterion.ts @@ -192,6 +192,7 @@ interface ICriterionOptionsParams { modifierOptions?: CriterionModifier[]; defaultModifier?: CriterionModifier; options?: Option[]; + hidden?: boolean; makeCriterion: ( o: CriterionOption, config?: ConfigDataFragment @@ -204,6 +205,10 @@ export class CriterionOption { public readonly defaultModifier: CriterionModifier; public readonly options: Option[] | undefined; public readonly inputType: InputType; + + // used for legacy criteria that are not shown in the UI + public readonly hidden: boolean = false; + public readonly makeCriterionFn: ( o: CriterionOption, config?: ConfigDataFragment @@ -216,6 +221,7 @@ export class CriterionOption { this.defaultModifier = options.defaultModifier ?? CriterionModifier.Equals; this.options = options.options; this.inputType = options.inputType; + this.hidden = options.hidden ?? 
false; this.makeCriterionFn = options.makeCriterion; } diff --git a/ui/v2.5/src/models/list-filter/criteria/groups.ts b/ui/v2.5/src/models/list-filter/criteria/groups.ts index 762ebf6e8b7..0db384c6a73 100644 --- a/ui/v2.5/src/models/list-filter/criteria/groups.ts +++ b/ui/v2.5/src/models/list-filter/criteria/groups.ts @@ -50,5 +50,6 @@ export const LegacyMoviesCriterionOption = new CriterionOption({ modifierOptions, defaultModifier, inputType, + hidden: true, makeCriterion: () => new GroupsCriterion(GroupsCriterionOption), }); From 4be793d4b35271db5154bce82c187ed87a478a7b Mon Sep 17 00:00:00 2001 From: WithoutPants <53250216+WithoutPants@users.noreply.github.com> Date: Tue, 3 Dec 2024 08:02:29 +1100 Subject: [PATCH 10/12] Fix scraped tags issues (#5522) * Fix display of matched scraped tags * Fix create new scraped tag not updating field correctly --- ui/v2.5/src/hooks/tagsEdit.tsx | 45 +++++++++++++++++++++------------- 1 file changed, 28 insertions(+), 17 deletions(-) diff --git a/ui/v2.5/src/hooks/tagsEdit.tsx b/ui/v2.5/src/hooks/tagsEdit.tsx index 73d6c951d95..7654081cf34 100644 --- a/ui/v2.5/src/hooks/tagsEdit.tsx +++ b/ui/v2.5/src/hooks/tagsEdit.tsx @@ -44,10 +44,15 @@ export function useTagsEdit( } // add the new tag to the new tags value - const newTagIds = tags - .map((t) => t.id) - .concat([result.data.tagCreate.id]); - setFieldValue(newTagIds); + onSetTags( + tags.concat([ + { + id: result.data.tagCreate.id, + name: toCreate.name ?? "", + aliases: [], + }, + ]) + ); // remove the tag from the list const newTagsClone = newTags!.concat(); @@ -73,20 +78,26 @@ export function useTagsEdit( function updateTagsStateFromScraper( scrapedTags?: Pick[] ) { - if (scrapedTags) { - // map tags to their ids and filter out those not found - onSetTags( - scrapedTags.map((p) => { - return { - id: p.stored_id!, - name: p.name ?? 
"", - aliases: [], - }; - }) - ); - - setNewTags(scrapedTags.filter((t) => !t.stored_id)); + if (!scrapedTags) { + return; } + + // map tags to their ids and filter out those not found + const idTags = scrapedTags.filter( + (t) => t.stored_id !== undefined && t.stored_id !== null + ); + const newNewTags = scrapedTags.filter((t) => !t.stored_id); + onSetTags( + idTags.map((p) => { + return { + id: p.stored_id!, + name: p.name ?? "", + aliases: [], + }; + }) + ); + + setNewTags(newNewTags); } function renderNewTags() { From a0e09bbe5c5a6890914eefa8bf7ff81809be7135 Mon Sep 17 00:00:00 2001 From: WithoutPants <53250216+WithoutPants@users.noreply.github.com> Date: Tue, 3 Dec 2024 08:02:46 +1100 Subject: [PATCH 11/12] Fix UI plugin race conditions (#5523) * useScript to return load state of scripts * Wait for scripts to load before rendering Also moves plugin code into plugins.tsx --- ui/v2.5/src/App.tsx | 172 ++++++++------------------------ ui/v2.5/src/hooks/useScript.tsx | 26 ++++- ui/v2.5/src/locales/en-GB.json | 3 +- ui/v2.5/src/plugins.tsx | 117 ++++++++++++++++++++++ 4 files changed, 184 insertions(+), 134 deletions(-) diff --git a/ui/v2.5/src/App.tsx b/ui/v2.5/src/App.tsx index f3229d36f59..658ca301122 100644 --- a/ui/v2.5/src/App.tsx +++ b/ui/v2.5/src/App.tsx @@ -18,7 +18,6 @@ import locales, { registerCountry } from "src/locales"; import { useConfiguration, useConfigureUI, - usePlugins, useSystemStatus, } from "src/core/StashService"; import flattenMessages from "./utils/flattenMessages"; @@ -40,12 +39,9 @@ import { releaseNotes } from "./docs/en/ReleaseNotes"; import { getPlatformURL } from "./core/createClient"; import { lazyComponent } from "./utils/lazyComponent"; import { isPlatformUniquelyRenderedByApple } from "./utils/apple"; -import useScript, { useCSS } from "./hooks/useScript"; -import { useMemoOnce } from "./hooks/state"; import Event from "./hooks/event"; -import { uniq } from "lodash-es"; -import { PluginRoutes } from "./plugins"; +import { 
PluginRoutes, PluginsLoader } from "./plugins"; // import plugin_api to run code import "./pluginApi"; @@ -97,54 +93,6 @@ function languageMessageString(language: string) { return language.replace(/-/, ""); } -type PluginList = NonNullable>; - -// sort plugins by their dependencies -function sortPlugins(plugins: PluginList) { - type Node = { id: string; afters: string[] }; - - let nodes: Record = {}; - let sorted: PluginList = []; - let visited: Record = {}; - - plugins.forEach((v) => { - let from = v.id; - - if (!nodes[from]) nodes[from] = { id: from, afters: [] }; - - v.requires?.forEach((to) => { - if (!nodes[to]) nodes[to] = { id: to, afters: [] }; - if (!nodes[to].afters.includes(from)) nodes[to].afters.push(from); - }); - }); - - function visit(idstr: string, ancestors: string[] = []) { - let node = nodes[idstr]; - const { id } = node; - - if (visited[idstr]) return; - - ancestors.push(id); - visited[idstr] = true; - node.afters.forEach(function (afterID) { - if (ancestors.indexOf(afterID) >= 0) - throw new Error("closed chain : " + afterID + " is in " + id); - visit(afterID.toString(), ancestors.slice()); - }); - - const plugin = plugins.find((v) => v.id === id); - if (plugin) { - sorted.unshift(plugin); - } - } - - Object.keys(nodes).forEach((n) => { - visit(n); - }); - - return sorted; -} - const AppContainer: React.FC> = PatchFunction( "App", (props: React.PropsWithChildren<{}>) => { @@ -215,46 +163,6 @@ export const App: React.FC = () => { setLocale(); }, [customMessages, language]); - const { - data: plugins, - loading: pluginsLoading, - error: pluginsError, - } = usePlugins(); - - const sortedPlugins = useMemoOnce(() => { - return [ - sortPlugins(plugins?.plugins ?? 
[]), - !pluginsLoading && !pluginsError, - ]; - }, [plugins?.plugins, pluginsLoading, pluginsError]); - - const pluginJavascripts = useMemoOnce(() => { - return [ - uniq( - sortedPlugins - ?.filter((plugin) => plugin.enabled && plugin.paths.javascript) - .map((plugin) => plugin.paths.javascript!) - .flat() ?? [] - ), - !!sortedPlugins && !pluginsLoading && !pluginsError, - ]; - }, [sortedPlugins, pluginsLoading, pluginsError]); - - const pluginCSS = useMemoOnce(() => { - return [ - uniq( - sortedPlugins - ?.filter((plugin) => plugin.enabled && plugin.paths.css) - .map((plugin) => plugin.paths.css!) - .flat() ?? [] - ), - !!sortedPlugins && !pluginsLoading && !pluginsError, - ]; - }, [sortedPlugins, pluginsLoading, pluginsError]); - - useScript(pluginJavascripts ?? [], !pluginsLoading && !pluginsError); - useCSS(pluginCSS ?? [], !pluginsLoading && !pluginsError); - const location = useLocation(); const history = useHistory(); const setupMatch = useRouteMatch(["/setup", "/migrate"]); @@ -365,43 +273,45 @@ export const App: React.FC = () => { const titleProps = makeTitleProps(); return ( - - - {messages ? ( - - - {maybeRenderReleaseNotes()} - - - }> - - - - - {maybeRenderNavbar()} -
- {renderContent()} -
-
-
-
-
-
-
-
- ) : null} -
-
+ + {messages ? ( + + + + + {maybeRenderReleaseNotes()} + + + }> + + + + + {maybeRenderNavbar()} +
+ {renderContent()} +
+
+
+
+
+
+
+
+
+
+ ) : null} +
); }; diff --git a/ui/v2.5/src/hooks/useScript.tsx b/ui/v2.5/src/hooks/useScript.tsx index 652ae2db693..2d030508d15 100644 --- a/ui/v2.5/src/hooks/useScript.tsx +++ b/ui/v2.5/src/hooks/useScript.tsx @@ -1,6 +1,9 @@ -import { useEffect, useMemo } from "react"; +import { useEffect, useMemo, useState } from "react"; + +const useScript = (urls: string | string[], condition: boolean = true) => { + // array of booleans to track the loading state of each script + const [loadStates, setLoadStates] = useState(); -const useScript = (urls: string | string[], condition?: boolean) => { const urlArray = useMemo(() => { if (!Array.isArray(urls)) { return [urls]; @@ -10,12 +13,25 @@ const useScript = (urls: string | string[], condition?: boolean) => { }, [urls]); useEffect(() => { + if (condition) { + setLoadStates(urlArray.map(() => false)); + } + const scripts = urlArray.map((url) => { const script = document.createElement("script"); script.src = url; script.async = false; script.defer = true; + + function onLoad() { + setLoadStates((prev) => + prev!.map((state, i) => (i === urlArray.indexOf(url) ? 
true : state)) + ); + } + script.addEventListener("load", onLoad); + script.addEventListener("error", onLoad); // handle error as well + return script; }); @@ -33,6 +49,12 @@ const useScript = (urls: string | string[], condition?: boolean) => { } }; }, [urlArray, condition]); + + return ( + condition && + loadStates && + (loadStates.length === 0 || loadStates.every((state) => state)) + ); }; export const useCSS = (urls: string | string[], condition?: boolean) => { diff --git a/ui/v2.5/src/locales/en-GB.json b/ui/v2.5/src/locales/en-GB.json index ac477d188a6..f9e5c3c4902 100644 --- a/ui/v2.5/src/locales/en-GB.json +++ b/ui/v2.5/src/locales/en-GB.json @@ -1121,7 +1121,8 @@ "last_played_at": "Last Played At", "library": "Library", "loading": { - "generic": "Loading…" + "generic": "Loading…", + "plugins": "Loading plugins…" }, "marker_count": "Marker Count", "markers": "Markers", diff --git a/ui/v2.5/src/plugins.tsx b/ui/v2.5/src/plugins.tsx index 8289a9e8e8e..256e1e5ce46 100644 --- a/ui/v2.5/src/plugins.tsx +++ b/ui/v2.5/src/plugins.tsx @@ -1,5 +1,122 @@ import React from "react"; import { PatchFunction } from "./patch"; +import { usePlugins } from "./core/StashService"; +import { useMemoOnce } from "./hooks/state"; +import { uniq } from "lodash-es"; +import useScript, { useCSS } from "./hooks/useScript"; +import { PluginsQuery } from "./core/generated-graphql"; +import { LoadingIndicator } from "./components/Shared/LoadingIndicator"; +import { FormattedMessage } from "react-intl"; + +type PluginList = NonNullable>; + +// sort plugins by their dependencies +function sortPlugins(plugins: PluginList) { + type Node = { id: string; afters: string[] }; + + let nodes: Record = {}; + let sorted: PluginList = []; + let visited: Record = {}; + + plugins.forEach((v) => { + let from = v.id; + + if (!nodes[from]) nodes[from] = { id: from, afters: [] }; + + v.requires?.forEach((to) => { + if (!nodes[to]) nodes[to] = { id: to, afters: [] }; + if (!nodes[to].afters.includes(from)) 
nodes[to].afters.push(from);
+    });
+  });
+
+  function visit(idstr: string, ancestors: string[] = []) {
+    let node = nodes[idstr];
+    const { id } = node;
+
+    if (visited[idstr]) return;
+
+    ancestors.push(id);
+    visited[idstr] = true;
+    node.afters.forEach(function (afterID) {
+      if (ancestors.indexOf(afterID) >= 0)
+        throw new Error("closed chain : " + afterID + " is in " + id);
+      visit(afterID.toString(), ancestors.slice());
+    });
+
+    const plugin = plugins.find((v) => v.id === id);
+    if (plugin) {
+      sorted.unshift(plugin);
+    }
+  }
+
+  Object.keys(nodes).forEach((n) => {
+    visit(n);
+  });
+
+  return sorted;
+}
+
+// load all plugins and their dependencies
+// returns true when all plugins are loaded, regardless of success or failure
+function useLoadPlugins() {
+  const {
+    data: plugins,
+    loading: pluginsLoading,
+    error: pluginsError,
+  } = usePlugins();
+
+  const sortedPlugins = useMemoOnce(() => {
+    return [
+      sortPlugins(plugins?.plugins ?? []),
+      !pluginsLoading && !pluginsError,
+    ];
+  }, [plugins?.plugins, pluginsLoading, pluginsError]);
+
+  const pluginJavascripts = useMemoOnce(() => {
+    return [
+      uniq(
+        sortedPlugins
+          ?.filter((plugin) => plugin.enabled && plugin.paths.javascript)
+          .map((plugin) => plugin.paths.javascript!)
+          .flat() ?? []
+      ),
+      !!sortedPlugins && !pluginsLoading && !pluginsError,
+    ];
+  }, [sortedPlugins, pluginsLoading, pluginsError]);
+
+  const pluginCSS = useMemoOnce(() => {
+    return [
+      uniq(
+        sortedPlugins
+          ?.filter((plugin) => plugin.enabled && plugin.paths.css)
+          .map((plugin) => plugin.paths.css!)
+          .flat() ?? []
+      ),
+      !!sortedPlugins && !pluginsLoading && !pluginsError,
+    ];
+  }, [sortedPlugins, pluginsLoading, pluginsError]);
+
+  const pluginJavascriptLoaded = useScript(
+    pluginJavascripts ?? [],
+    !!pluginJavascripts && !pluginsLoading && !pluginsError
+  );
+  useCSS(pluginCSS ??
[], !pluginsLoading && !pluginsError); + + return !pluginsLoading && !!pluginJavascripts && pluginJavascriptLoaded; +} + +export const PluginsLoader: React.FC> = ({ + children, +}) => { + const loaded = useLoadPlugins(); + + if (!loaded) + return ( + } /> + ); + + return <>{children}; +}; export const PluginRoutes: React.FC> = PatchFunction("PluginRoutes", (props: React.PropsWithChildren<{}>) => { From 8c8be22fe4da081b951f95ad71cb44e9411d14ec Mon Sep 17 00:00:00 2001 From: WithoutPants <53250216+WithoutPants@users.noreply.github.com> Date: Tue, 3 Dec 2024 13:49:55 +1100 Subject: [PATCH 12/12] Performer custom fields (#5487) * Backend changes * Show custom field values * Add custom fields table input * Add custom field filtering * Add unit tests * Include custom fields in import/export * Anonymise performer custom fields * Move json.Number handler functions to api * Handle json.Number conversion in api --- graphql/schema/types/filters.graphql | 8 + graphql/schema/types/metadata.graphql | 7 + graphql/schema/types/performer.graphql | 8 + internal/api/json.go | 36 ++ internal/api/json_test.go | 60 ++ .../api/loaders/customfieldsloader_gen.go | 221 +++++++ internal/api/loaders/dataloaders.go | 35 +- internal/api/resolver_model_performer.go | 13 + internal/api/resolver_mutation_configure.go | 10 +- internal/api/resolver_mutation_performer.go | 13 +- internal/autotag/integration_test.go | 2 +- internal/identify/performer.go | 2 +- internal/identify/performer_test.go | 10 +- internal/manager/task_stash_box_tag.go | 2 +- pkg/gallery/import.go | 4 +- pkg/gallery/import_test.go | 6 +- pkg/image/import.go | 4 +- pkg/image/import_test.go | 6 +- pkg/models/custom_fields.go | 17 + pkg/models/filter.go | 6 + pkg/models/jsonschema/performer.go | 2 + pkg/models/mocks/PerformerReaderWriter.go | 54 +- pkg/models/model_performer.go | 14 + pkg/models/performer.go | 7 + pkg/models/repository_performer.go | 6 +- pkg/performer/export.go | 7 + pkg/performer/export_test.go | 59 +- 
pkg/performer/import.go | 23 +- pkg/performer/import_test.go | 27 +- pkg/scene/import.go | 4 +- pkg/scene/import_test.go | 6 +- pkg/sqlite/anonymise.go | 74 +++ pkg/sqlite/custom_fields.go | 308 ++++++++++ pkg/sqlite/custom_fields_test.go | 176 ++++++ pkg/sqlite/database.go | 2 +- pkg/sqlite/filter.go | 7 +- pkg/sqlite/migrations/71_custom_fields.up.sql | 9 + pkg/sqlite/performer.go | 28 +- pkg/sqlite/performer_filter.go | 7 + pkg/sqlite/performer_test.go | 574 +++++++++++++++--- pkg/sqlite/query.go | 8 +- pkg/sqlite/repository.go | 4 +- pkg/sqlite/setup_test.go | 17 +- pkg/sqlite/tables.go | 1 + pkg/utils/json.go | 16 - pkg/utils/map.go | 17 - pkg/utils/map_test.go | 55 -- ui/v2.5/graphql/data/performer.graphql | 2 + .../PerformerDetailsPanel.tsx | 2 + .../PerformerDetails/PerformerEditPanel.tsx | 41 +- ui/v2.5/src/components/Performers/styles.scss | 11 + .../src/components/Shared/CollapseButton.tsx | 7 +- .../src/components/Shared/CustomFields.tsx | 308 ++++++++++ ui/v2.5/src/components/Shared/DetailItem.tsx | 21 +- ui/v2.5/src/components/Shared/styles.scss | 50 ++ ui/v2.5/src/locales/en-GB.json | 11 + 56 files changed, 2158 insertions(+), 277 deletions(-) create mode 100644 internal/api/json.go create mode 100644 internal/api/json_test.go create mode 100644 internal/api/loaders/customfieldsloader_gen.go create mode 100644 pkg/models/custom_fields.go create mode 100644 pkg/sqlite/custom_fields.go create mode 100644 pkg/sqlite/custom_fields_test.go create mode 100644 pkg/sqlite/migrations/71_custom_fields.up.sql delete mode 100644 pkg/utils/json.go create mode 100644 ui/v2.5/src/components/Shared/CustomFields.tsx diff --git a/graphql/schema/types/filters.graphql b/graphql/schema/types/filters.graphql index 23396a98ffd..7600b563b83 100644 --- a/graphql/schema/types/filters.graphql +++ b/graphql/schema/types/filters.graphql @@ -91,6 +91,12 @@ input StashIDCriterionInput { modifier: CriterionModifier! } +input CustomFieldCriterionInput { + field: String! 
+ value: [Any!] + modifier: CriterionModifier! +} + input PerformerFilterType { AND: PerformerFilterType OR: PerformerFilterType @@ -182,6 +188,8 @@ input PerformerFilterType { created_at: TimestampCriterionInput "Filter by last update time" updated_at: TimestampCriterionInput + + custom_fields: [CustomFieldCriterionInput!] } input SceneMarkerFilterType { diff --git a/graphql/schema/types/metadata.graphql b/graphql/schema/types/metadata.graphql index 38c910d369c..923c25b4c32 100644 --- a/graphql/schema/types/metadata.graphql +++ b/graphql/schema/types/metadata.graphql @@ -338,3 +338,10 @@ type SystemStatus { input MigrateInput { backupPath: String! } + +input CustomFieldsInput { + "If populated, the entire custom fields map will be replaced with this value" + full: Map + "If populated, only the keys in this map will be updated" + partial: Map +} diff --git a/graphql/schema/types/performer.graphql b/graphql/schema/types/performer.graphql index d6f3dd832c4..fbb67ce8f07 100644 --- a/graphql/schema/types/performer.graphql +++ b/graphql/schema/types/performer.graphql @@ -58,6 +58,8 @@ type Performer { updated_at: Time! groups: [Group!]! movies: [Movie!]! @deprecated(reason: "use groups instead") + + custom_fields: Map! 
} input PerformerCreateInput { @@ -93,6 +95,8 @@ input PerformerCreateInput { hair_color: String weight: Int ignore_auto_tag: Boolean + + custom_fields: Map } input PerformerUpdateInput { @@ -129,6 +133,8 @@ input PerformerUpdateInput { hair_color: String weight: Int ignore_auto_tag: Boolean + + custom_fields: CustomFieldsInput } input BulkUpdateStrings { @@ -167,6 +173,8 @@ input BulkPerformerUpdateInput { hair_color: String weight: Int ignore_auto_tag: Boolean + + custom_fields: CustomFieldsInput } input PerformerDestroyInput { diff --git a/internal/api/json.go b/internal/api/json.go new file mode 100644 index 00000000000..edc5f9df80c --- /dev/null +++ b/internal/api/json.go @@ -0,0 +1,36 @@ +package api + +import ( + "encoding/json" + "strings" +) + +// JSONNumberToNumber converts a JSON number to either a float64 or int64. +func jsonNumberToNumber(n json.Number) interface{} { + if strings.Contains(string(n), ".") { + f, _ := n.Float64() + return f + } + ret, _ := n.Int64() + return ret +} + +// ConvertMapJSONNumbers converts all JSON numbers in a map to either float64 or int64. 
+func convertMapJSONNumbers(m map[string]interface{}) (ret map[string]interface{}) { + if m == nil { + return nil + } + + ret = make(map[string]interface{}) + for k, v := range m { + if n, ok := v.(json.Number); ok { + ret[k] = jsonNumberToNumber(n) + } else if mm, ok := v.(map[string]interface{}); ok { + ret[k] = convertMapJSONNumbers(mm) + } else { + ret[k] = v + } + } + + return ret +} diff --git a/internal/api/json_test.go b/internal/api/json_test.go new file mode 100644 index 00000000000..7c1b2fe90f0 --- /dev/null +++ b/internal/api/json_test.go @@ -0,0 +1,60 @@ +package api + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestConvertMapJSONNumbers(t *testing.T) { + tests := []struct { + name string + input map[string]interface{} + expected map[string]interface{} + }{ + { + name: "Convert JSON numbers to numbers", + input: map[string]interface{}{ + "int": json.Number("12"), + "float": json.Number("12.34"), + "string": "foo", + }, + expected: map[string]interface{}{ + "int": int64(12), + "float": 12.34, + "string": "foo", + }, + }, + { + name: "Convert JSON numbers to numbers in nested maps", + input: map[string]interface{}{ + "foo": map[string]interface{}{ + "int": json.Number("56"), + "float": json.Number("56.78"), + "nested-string": "bar", + }, + "int": json.Number("12"), + "float": json.Number("12.34"), + "string": "foo", + }, + expected: map[string]interface{}{ + "foo": map[string]interface{}{ + "int": int64(56), + "float": 56.78, + "nested-string": "bar", + }, + "int": int64(12), + "float": 12.34, + "string": "foo", + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := convertMapJSONNumbers(tt.input) + assert.Equal(t, tt.expected, result) + }) + } +} diff --git a/internal/api/loaders/customfieldsloader_gen.go b/internal/api/loaders/customfieldsloader_gen.go new file mode 100644 index 00000000000..d4dd3de78ab --- /dev/null +++ 
b/internal/api/loaders/customfieldsloader_gen.go @@ -0,0 +1,221 @@ +// Code generated by github.com/vektah/dataloaden, DO NOT EDIT. + +package loaders + +import ( + "sync" + "time" + + "github.com/stashapp/stash/pkg/models" +) + +// CustomFieldsLoaderConfig captures the config to create a new CustomFieldsLoader +type CustomFieldsLoaderConfig struct { + // Fetch is a method that provides the data for the loader + Fetch func(keys []int) ([]models.CustomFieldMap, []error) + + // Wait is how long wait before sending a batch + Wait time.Duration + + // MaxBatch will limit the maximum number of keys to send in one batch, 0 = not limit + MaxBatch int +} + +// NewCustomFieldsLoader creates a new CustomFieldsLoader given a fetch, wait, and maxBatch +func NewCustomFieldsLoader(config CustomFieldsLoaderConfig) *CustomFieldsLoader { + return &CustomFieldsLoader{ + fetch: config.Fetch, + wait: config.Wait, + maxBatch: config.MaxBatch, + } +} + +// CustomFieldsLoader batches and caches requests +type CustomFieldsLoader struct { + // this method provides the data for the loader + fetch func(keys []int) ([]models.CustomFieldMap, []error) + + // how long to done before sending a batch + wait time.Duration + + // this will limit the maximum number of keys to send in one batch, 0 = no limit + maxBatch int + + // INTERNAL + + // lazily created cache + cache map[int]models.CustomFieldMap + + // the current batch. 
keys will continue to be collected until timeout is hit, + // then everything will be sent to the fetch method and out to the listeners + batch *customFieldsLoaderBatch + + // mutex to prevent races + mu sync.Mutex +} + +type customFieldsLoaderBatch struct { + keys []int + data []models.CustomFieldMap + error []error + closing bool + done chan struct{} +} + +// Load a CustomFieldMap by key, batching and caching will be applied automatically +func (l *CustomFieldsLoader) Load(key int) (models.CustomFieldMap, error) { + return l.LoadThunk(key)() +} + +// LoadThunk returns a function that when called will block waiting for a CustomFieldMap. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *CustomFieldsLoader) LoadThunk(key int) func() (models.CustomFieldMap, error) { + l.mu.Lock() + if it, ok := l.cache[key]; ok { + l.mu.Unlock() + return func() (models.CustomFieldMap, error) { + return it, nil + } + } + if l.batch == nil { + l.batch = &customFieldsLoaderBatch{done: make(chan struct{})} + } + batch := l.batch + pos := batch.keyIndex(l, key) + l.mu.Unlock() + + return func() (models.CustomFieldMap, error) { + <-batch.done + + var data models.CustomFieldMap + if pos < len(batch.data) { + data = batch.data[pos] + } + + var err error + // its convenient to be able to return a single error for everything + if len(batch.error) == 1 { + err = batch.error[0] + } else if batch.error != nil { + err = batch.error[pos] + } + + if err == nil { + l.mu.Lock() + l.unsafeSet(key, data) + l.mu.Unlock() + } + + return data, err + } +} + +// LoadAll fetches many keys at once. 
It will be broken into appropriate sized +// sub batches depending on how the loader is configured +func (l *CustomFieldsLoader) LoadAll(keys []int) ([]models.CustomFieldMap, []error) { + results := make([]func() (models.CustomFieldMap, error), len(keys)) + + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + + customFieldMaps := make([]models.CustomFieldMap, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + customFieldMaps[i], errors[i] = thunk() + } + return customFieldMaps, errors +} + +// LoadAllThunk returns a function that when called will block waiting for a CustomFieldMaps. +// This method should be used if you want one goroutine to make requests to many +// different data loaders without blocking until the thunk is called. +func (l *CustomFieldsLoader) LoadAllThunk(keys []int) func() ([]models.CustomFieldMap, []error) { + results := make([]func() (models.CustomFieldMap, error), len(keys)) + for i, key := range keys { + results[i] = l.LoadThunk(key) + } + return func() ([]models.CustomFieldMap, []error) { + customFieldMaps := make([]models.CustomFieldMap, len(keys)) + errors := make([]error, len(keys)) + for i, thunk := range results { + customFieldMaps[i], errors[i] = thunk() + } + return customFieldMaps, errors + } +} + +// Prime the cache with the provided key and value. If the key already exists, no change is made +// and false is returned. +// (To forcefully prime the cache, clear the key first with loader.clear(key).prime(key, value).) 
+func (l *CustomFieldsLoader) Prime(key int, value models.CustomFieldMap) bool { + l.mu.Lock() + var found bool + if _, found = l.cache[key]; !found { + l.unsafeSet(key, value) + } + l.mu.Unlock() + return !found +} + +// Clear the value at key from the cache, if it exists +func (l *CustomFieldsLoader) Clear(key int) { + l.mu.Lock() + delete(l.cache, key) + l.mu.Unlock() +} + +func (l *CustomFieldsLoader) unsafeSet(key int, value models.CustomFieldMap) { + if l.cache == nil { + l.cache = map[int]models.CustomFieldMap{} + } + l.cache[key] = value +} + +// keyIndex will return the location of the key in the batch, if its not found +// it will add the key to the batch +func (b *customFieldsLoaderBatch) keyIndex(l *CustomFieldsLoader, key int) int { + for i, existingKey := range b.keys { + if key == existingKey { + return i + } + } + + pos := len(b.keys) + b.keys = append(b.keys, key) + if pos == 0 { + go b.startTimer(l) + } + + if l.maxBatch != 0 && pos >= l.maxBatch-1 { + if !b.closing { + b.closing = true + l.batch = nil + go b.end(l) + } + } + + return pos +} + +func (b *customFieldsLoaderBatch) startTimer(l *CustomFieldsLoader) { + time.Sleep(l.wait) + l.mu.Lock() + + // we must have hit a batch limit and are already finalizing this batch + if b.closing { + l.mu.Unlock() + return + } + + l.batch = nil + l.mu.Unlock() + + b.end(l) +} + +func (b *customFieldsLoaderBatch) end(l *CustomFieldsLoader) { + b.data, b.error = l.fetch(b.keys) + close(b.done) +} diff --git a/internal/api/loaders/dataloaders.go b/internal/api/loaders/dataloaders.go index fca3e6c1842..493c353d785 100644 --- a/internal/api/loaders/dataloaders.go +++ b/internal/api/loaders/dataloaders.go @@ -13,6 +13,7 @@ //go:generate go run github.com/vektah/dataloaden SceneFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID //go:generate go run github.com/vektah/dataloaden ImageFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID //go:generate go run github.com/vektah/dataloaden 
GalleryFileIDsLoader int []github.com/stashapp/stash/pkg/models.FileID +//go:generate go run github.com/vektah/dataloaden CustomFieldsLoader int github.com/stashapp/stash/pkg/models.CustomFieldMap //go:generate go run github.com/vektah/dataloaden SceneOCountLoader int int //go:generate go run github.com/vektah/dataloaden ScenePlayCountLoader int int //go:generate go run github.com/vektah/dataloaden SceneOHistoryLoader int []time.Time @@ -51,13 +52,16 @@ type Loaders struct { ImageFiles *ImageFileIDsLoader GalleryFiles *GalleryFileIDsLoader - GalleryByID *GalleryLoader - ImageByID *ImageLoader - PerformerByID *PerformerLoader - StudioByID *StudioLoader - TagByID *TagLoader - GroupByID *GroupLoader - FileByID *FileLoader + GalleryByID *GalleryLoader + ImageByID *ImageLoader + + PerformerByID *PerformerLoader + PerformerCustomFields *CustomFieldsLoader + + StudioByID *StudioLoader + TagByID *TagLoader + GroupByID *GroupLoader + FileByID *FileLoader } type Middleware struct { @@ -88,6 +92,11 @@ func (m Middleware) Middleware(next http.Handler) http.Handler { maxBatch: maxBatch, fetch: m.fetchPerformers(ctx), }, + PerformerCustomFields: &CustomFieldsLoader{ + wait: wait, + maxBatch: maxBatch, + fetch: m.fetchPerformerCustomFields(ctx), + }, StudioByID: &StudioLoader{ wait: wait, maxBatch: maxBatch, @@ -214,6 +223,18 @@ func (m Middleware) fetchPerformers(ctx context.Context) func(keys []int) ([]*mo } } +func (m Middleware) fetchPerformerCustomFields(ctx context.Context) func(keys []int) ([]models.CustomFieldMap, []error) { + return func(keys []int) (ret []models.CustomFieldMap, errs []error) { + err := m.Repository.WithDB(ctx, func(ctx context.Context) error { + var err error + ret, err = m.Repository.Performer.GetCustomFieldsBulk(ctx, keys) + return err + }) + + return ret, toErrorSlice(err) + } +} + func (m Middleware) fetchStudios(ctx context.Context) func(keys []int) ([]*models.Studio, []error) { return func(keys []int) (ret []*models.Studio, errs []error) { err := 
m.Repository.WithDB(ctx, func(ctx context.Context) error { diff --git a/internal/api/resolver_model_performer.go b/internal/api/resolver_model_performer.go index b6f6af369ad..94da629323d 100644 --- a/internal/api/resolver_model_performer.go +++ b/internal/api/resolver_model_performer.go @@ -268,6 +268,19 @@ func (r *performerResolver) Groups(ctx context.Context, obj *models.Performer) ( return ret, nil } +func (r *performerResolver) CustomFields(ctx context.Context, obj *models.Performer) (map[string]interface{}, error) { + m, err := loaders.From(ctx).PerformerCustomFields.Load(obj.ID) + if err != nil { + return nil, err + } + + if m == nil { + return make(map[string]interface{}), nil + } + + return m, nil +} + // deprecated func (r *performerResolver) Movies(ctx context.Context, obj *models.Performer) (ret []*models.Group, err error) { return r.Groups(ctx, obj) diff --git a/internal/api/resolver_mutation_configure.go b/internal/api/resolver_mutation_configure.go index c4356ff5857..d9c71b09fca 100644 --- a/internal/api/resolver_mutation_configure.go +++ b/internal/api/resolver_mutation_configure.go @@ -645,13 +645,13 @@ func (r *mutationResolver) ConfigureUI(ctx context.Context, input map[string]int if input != nil { // #5483 - convert JSON numbers to float64 or int64 - input = utils.ConvertMapJSONNumbers(input) + input = convertMapJSONNumbers(input) c.SetUIConfiguration(input) } if partial != nil { // #5483 - convert JSON numbers to float64 or int64 - partial = utils.ConvertMapJSONNumbers(partial) + partial = convertMapJSONNumbers(partial) // merge partial into existing config existing := c.GetUIConfiguration() utils.MergeMaps(existing, partial) @@ -672,9 +672,9 @@ func (r *mutationResolver) ConfigureUISetting(ctx context.Context, key string, v // #5483 - convert JSON numbers to float64 or int64 if m, ok := value.(map[string]interface{}); ok { - value = utils.ConvertMapJSONNumbers(m) + value = convertMapJSONNumbers(m) } else if n, ok := value.(json.Number); ok { - 
value = utils.JSONNumberToNumber(n) + value = jsonNumberToNumber(n) } cfg.Set(key, value) @@ -686,7 +686,7 @@ func (r *mutationResolver) ConfigurePlugin(ctx context.Context, pluginID string, c := config.GetInstance() // #5483 - convert JSON numbers to float64 or int64 - input = utils.ConvertMapJSONNumbers(input) + input = convertMapJSONNumbers(input) c.SetPluginConfiguration(pluginID, input) if err := c.Write(); err != nil { diff --git a/internal/api/resolver_mutation_performer.go b/internal/api/resolver_mutation_performer.go index 87f0883ed24..47b02147de0 100644 --- a/internal/api/resolver_mutation_performer.go +++ b/internal/api/resolver_mutation_performer.go @@ -108,7 +108,13 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input models.Per return err } - err = qb.Create(ctx, &newPerformer) + i := &models.CreatePerformerInput{ + Performer: &newPerformer, + // convert json.Numbers to int/float + CustomFields: convertMapJSONNumbers(input.CustomFields), + } + + err = qb.Create(ctx, i) if err != nil { return err } @@ -290,6 +296,11 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input models.Per return nil, fmt.Errorf("converting tag ids: %w", err) } + updatedPerformer.CustomFields = input.CustomFields + // convert json.Numbers to int/float + updatedPerformer.CustomFields.Full = convertMapJSONNumbers(updatedPerformer.CustomFields.Full) + updatedPerformer.CustomFields.Partial = convertMapJSONNumbers(updatedPerformer.CustomFields.Partial) + var imageData []byte imageIncluded := translator.hasField("image") if input.Image != nil { diff --git a/internal/autotag/integration_test.go b/internal/autotag/integration_test.go index e74cb30aa66..565d73853c4 100644 --- a/internal/autotag/integration_test.go +++ b/internal/autotag/integration_test.go @@ -91,7 +91,7 @@ func createPerformer(ctx context.Context, pqb models.PerformerWriter) error { Name: testName, } - err := pqb.Create(ctx, &performer) + err := pqb.Create(ctx, 
&models.CreatePerformerInput{Performer: &performer}) if err != nil { return err } diff --git a/internal/identify/performer.go b/internal/identify/performer.go index 947bb09d6f8..7ee66b500c7 100644 --- a/internal/identify/performer.go +++ b/internal/identify/performer.go @@ -41,7 +41,7 @@ func createMissingPerformer(ctx context.Context, endpoint string, w PerformerCre return nil, err } - err = w.Create(ctx, newPerformer) + err = w.Create(ctx, &models.CreatePerformerInput{Performer: newPerformer}) if err != nil { return nil, fmt.Errorf("error creating performer: %w", err) } diff --git a/internal/identify/performer_test.go b/internal/identify/performer_test.go index 09690959de0..8d443763aa3 100644 --- a/internal/identify/performer_test.go +++ b/internal/identify/performer_test.go @@ -24,8 +24,8 @@ func Test_getPerformerID(t *testing.T) { db := mocks.NewDatabase() - db.Performer.On("Create", testCtx, mock.Anything).Run(func(args mock.Arguments) { - p := args.Get(1).(*models.Performer) + db.Performer.On("Create", testCtx, mock.AnythingOfType("*models.CreatePerformerInput")).Run(func(args mock.Arguments) { + p := args.Get(1).(*models.CreatePerformerInput) p.ID = validStoredID }).Return(nil) @@ -154,14 +154,14 @@ func Test_createMissingPerformer(t *testing.T) { db := mocks.NewDatabase() - db.Performer.On("Create", testCtx, mock.MatchedBy(func(p *models.Performer) bool { + db.Performer.On("Create", testCtx, mock.MatchedBy(func(p *models.CreatePerformerInput) bool { return p.Name == validName })).Run(func(args mock.Arguments) { - p := args.Get(1).(*models.Performer) + p := args.Get(1).(*models.CreatePerformerInput) p.ID = performerID }).Return(nil) - db.Performer.On("Create", testCtx, mock.MatchedBy(func(p *models.Performer) bool { + db.Performer.On("Create", testCtx, mock.MatchedBy(func(p *models.CreatePerformerInput) bool { return p.Name == invalidName })).Return(errors.New("error creating performer")) diff --git a/internal/manager/task_stash_box_tag.go 
b/internal/manager/task_stash_box_tag.go index 8bb39960140..e26edc8b1ab 100644 --- a/internal/manager/task_stash_box_tag.go +++ b/internal/manager/task_stash_box_tag.go @@ -194,7 +194,7 @@ func (t *StashBoxBatchTagTask) processMatchedPerformer(ctx context.Context, p *m return err } - if err := qb.Create(ctx, newPerformer); err != nil { + if err := qb.Create(ctx, &models.CreatePerformerInput{Performer: newPerformer}); err != nil { return err } diff --git a/pkg/gallery/import.go b/pkg/gallery/import.go index aaf37bd27e4..7cdf53691ad 100644 --- a/pkg/gallery/import.go +++ b/pkg/gallery/import.go @@ -188,7 +188,9 @@ func (i *Importer) createPerformers(ctx context.Context, names []string) ([]*mod newPerformer := models.NewPerformer() newPerformer.Name = name - err := i.PerformerWriter.Create(ctx, &newPerformer) + err := i.PerformerWriter.Create(ctx, &models.CreatePerformerInput{ + Performer: &newPerformer, + }) if err != nil { return nil, err } diff --git a/pkg/gallery/import_test.go b/pkg/gallery/import_test.go index ec2cf7a77f5..b64f80d8f6b 100644 --- a/pkg/gallery/import_test.go +++ b/pkg/gallery/import_test.go @@ -201,8 +201,8 @@ func TestImporterPreImportWithMissingPerformer(t *testing.T) { } db.Performer.On("FindByNames", testCtx, []string{missingPerformerName}, false).Return(nil, nil).Times(3) - db.Performer.On("Create", testCtx, mock.AnythingOfType("*models.Performer")).Run(func(args mock.Arguments) { - performer := args.Get(1).(*models.Performer) + db.Performer.On("Create", testCtx, mock.AnythingOfType("*models.CreatePerformerInput")).Run(func(args mock.Arguments) { + performer := args.Get(1).(*models.CreatePerformerInput) performer.ID = existingPerformerID }).Return(nil) @@ -235,7 +235,7 @@ func TestImporterPreImportWithMissingPerformerCreateErr(t *testing.T) { } db.Performer.On("FindByNames", testCtx, []string{missingPerformerName}, false).Return(nil, nil).Once() - db.Performer.On("Create", testCtx, 
mock.AnythingOfType("*models.Performer")).Return(errors.New("Create error")) + db.Performer.On("Create", testCtx, mock.AnythingOfType("*models.CreatePerformerInput")).Return(errors.New("Create error")) err := i.PreImport(testCtx) assert.NotNil(t, err) diff --git a/pkg/image/import.go b/pkg/image/import.go index 660eb1da18d..ec200af047f 100644 --- a/pkg/image/import.go +++ b/pkg/image/import.go @@ -274,7 +274,9 @@ func (i *Importer) createPerformers(ctx context.Context, names []string) ([]*mod newPerformer := models.NewPerformer() newPerformer.Name = name - err := i.PerformerWriter.Create(ctx, &newPerformer) + err := i.PerformerWriter.Create(ctx, &models.CreatePerformerInput{ + Performer: &newPerformer, + }) if err != nil { return nil, err } diff --git a/pkg/image/import_test.go b/pkg/image/import_test.go index 9d63dd02e92..286e51fe34b 100644 --- a/pkg/image/import_test.go +++ b/pkg/image/import_test.go @@ -163,8 +163,8 @@ func TestImporterPreImportWithMissingPerformer(t *testing.T) { } db.Performer.On("FindByNames", testCtx, []string{missingPerformerName}, false).Return(nil, nil).Times(3) - db.Performer.On("Create", testCtx, mock.AnythingOfType("*models.Performer")).Run(func(args mock.Arguments) { - performer := args.Get(1).(*models.Performer) + db.Performer.On("Create", testCtx, mock.AnythingOfType("*models.CreatePerformerInput")).Run(func(args mock.Arguments) { + performer := args.Get(1).(*models.CreatePerformerInput) performer.ID = existingPerformerID }).Return(nil) @@ -197,7 +197,7 @@ func TestImporterPreImportWithMissingPerformerCreateErr(t *testing.T) { } db.Performer.On("FindByNames", testCtx, []string{missingPerformerName}, false).Return(nil, nil).Once() - db.Performer.On("Create", testCtx, mock.AnythingOfType("*models.Performer")).Return(errors.New("Create error")) + db.Performer.On("Create", testCtx, mock.AnythingOfType("*models.CreatePerformerInput")).Return(errors.New("Create error")) err := i.PreImport(testCtx) assert.NotNil(t, err) diff --git 
a/pkg/models/custom_fields.go b/pkg/models/custom_fields.go new file mode 100644 index 00000000000..977c2fe89f3 --- /dev/null +++ b/pkg/models/custom_fields.go @@ -0,0 +1,17 @@ +package models + +import "context" + +type CustomFieldMap map[string]interface{} + +type CustomFieldsInput struct { + // If populated, the entire custom fields map will be replaced with this value + Full map[string]interface{} `json:"full"` + // If populated, only the keys in this map will be updated + Partial map[string]interface{} `json:"partial"` +} + +type CustomFieldsReader interface { + GetCustomFields(ctx context.Context, id int) (map[string]interface{}, error) + GetCustomFieldsBulk(ctx context.Context, ids []int) ([]CustomFieldMap, error) +} diff --git a/pkg/models/filter.go b/pkg/models/filter.go index 577aef42be9..2d25f651636 100644 --- a/pkg/models/filter.go +++ b/pkg/models/filter.go @@ -194,3 +194,9 @@ type PhashDistanceCriterionInput struct { type OrientationCriterionInput struct { Value []OrientationEnum `json:"value"` } + +type CustomFieldCriterionInput struct { + Field string `json:"field"` + Value []any `json:"value"` + Modifier CriterionModifier `json:"modifier"` +} diff --git a/pkg/models/jsonschema/performer.go b/pkg/models/jsonschema/performer.go index 7ffa69983b4..5edd5724c63 100644 --- a/pkg/models/jsonschema/performer.go +++ b/pkg/models/jsonschema/performer.go @@ -65,6 +65,8 @@ type Performer struct { StashIDs []models.StashID `json:"stash_ids,omitempty"` IgnoreAutoTag bool `json:"ignore_auto_tag,omitempty"` + CustomFields map[string]interface{} `json:"custom_fields,omitempty"` + // deprecated - for import only URL string `json:"url,omitempty"` Twitter string `json:"twitter,omitempty"` diff --git a/pkg/models/mocks/PerformerReaderWriter.go b/pkg/models/mocks/PerformerReaderWriter.go index 0f3e2be02b6..dbf19a3cdce 100644 --- a/pkg/models/mocks/PerformerReaderWriter.go +++ b/pkg/models/mocks/PerformerReaderWriter.go @@ -80,11 +80,11 @@ func (_m 
*PerformerReaderWriter) CountByTagID(ctx context.Context, tagID int) (i } // Create provides a mock function with given fields: ctx, newPerformer -func (_m *PerformerReaderWriter) Create(ctx context.Context, newPerformer *models.Performer) error { +func (_m *PerformerReaderWriter) Create(ctx context.Context, newPerformer *models.CreatePerformerInput) error { ret := _m.Called(ctx, newPerformer) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *models.Performer) error); ok { + if rf, ok := ret.Get(0).(func(context.Context, *models.CreatePerformerInput) error); ok { r0 = rf(ctx, newPerformer) } else { r0 = ret.Error(0) @@ -314,6 +314,52 @@ func (_m *PerformerReaderWriter) GetAliases(ctx context.Context, relatedID int) return r0, r1 } +// GetCustomFields provides a mock function with given fields: ctx, id +func (_m *PerformerReaderWriter) GetCustomFields(ctx context.Context, id int) (map[string]interface{}, error) { + ret := _m.Called(ctx, id) + + var r0 map[string]interface{} + if rf, ok := ret.Get(0).(func(context.Context, int) map[string]interface{}); ok { + r0 = rf(ctx, id) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(map[string]interface{}) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, int) error); ok { + r1 = rf(ctx, id) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// GetCustomFieldsBulk provides a mock function with given fields: ctx, ids +func (_m *PerformerReaderWriter) GetCustomFieldsBulk(ctx context.Context, ids []int) ([]models.CustomFieldMap, error) { + ret := _m.Called(ctx, ids) + + var r0 []models.CustomFieldMap + if rf, ok := ret.Get(0).(func(context.Context, []int) []models.CustomFieldMap); ok { + r0 = rf(ctx, ids) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]models.CustomFieldMap) + } + } + + var r1 error + if rf, ok := ret.Get(1).(func(context.Context, []int) error); ok { + r1 = rf(ctx, ids) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + // GetImage provides a 
mock function with given fields: ctx, performerID func (_m *PerformerReaderWriter) GetImage(ctx context.Context, performerID int) ([]byte, error) { ret := _m.Called(ctx, performerID) @@ -502,11 +548,11 @@ func (_m *PerformerReaderWriter) QueryForAutoTag(ctx context.Context, words []st } // Update provides a mock function with given fields: ctx, updatedPerformer -func (_m *PerformerReaderWriter) Update(ctx context.Context, updatedPerformer *models.Performer) error { +func (_m *PerformerReaderWriter) Update(ctx context.Context, updatedPerformer *models.UpdatePerformerInput) error { ret := _m.Called(ctx, updatedPerformer) var r0 error - if rf, ok := ret.Get(0).(func(context.Context, *models.Performer) error); ok { + if rf, ok := ret.Get(0).(func(context.Context, *models.UpdatePerformerInput) error); ok { r0 = rf(ctx, updatedPerformer) } else { r0 = ret.Error(0) diff --git a/pkg/models/model_performer.go b/pkg/models/model_performer.go index 85257ba38a4..566dcae1eff 100644 --- a/pkg/models/model_performer.go +++ b/pkg/models/model_performer.go @@ -39,6 +39,18 @@ type Performer struct { StashIDs RelatedStashIDs `json:"stash_ids"` } +type CreatePerformerInput struct { + *Performer + + CustomFields map[string]interface{} `json:"custom_fields"` +} + +type UpdatePerformerInput struct { + *Performer + + CustomFields CustomFieldsInput `json:"custom_fields"` +} + func NewPerformer() Performer { currentTime := time.Now() return Performer{ @@ -80,6 +92,8 @@ type PerformerPartial struct { Aliases *UpdateStrings TagIDs *UpdateIDs StashIDs *UpdateStashIDs + + CustomFields CustomFieldsInput } func NewPerformerPartial() PerformerPartial { diff --git a/pkg/models/performer.go b/pkg/models/performer.go index 47394996d3f..7301afb83bb 100644 --- a/pkg/models/performer.go +++ b/pkg/models/performer.go @@ -198,6 +198,9 @@ type PerformerFilterType struct { CreatedAt *TimestampCriterionInput `json:"created_at"` // Filter by updated at UpdatedAt *TimestampCriterionInput `json:"updated_at"` + 
+ // Filter by custom fields + CustomFields []CustomFieldCriterionInput `json:"custom_fields"` } type PerformerCreateInput struct { @@ -234,6 +237,8 @@ type PerformerCreateInput struct { HairColor *string `json:"hair_color"` Weight *int `json:"weight"` IgnoreAutoTag *bool `json:"ignore_auto_tag"` + + CustomFields map[string]interface{} `json:"custom_fields"` } type PerformerUpdateInput struct { @@ -271,4 +276,6 @@ type PerformerUpdateInput struct { HairColor *string `json:"hair_color"` Weight *int `json:"weight"` IgnoreAutoTag *bool `json:"ignore_auto_tag"` + + CustomFields CustomFieldsInput `json:"custom_fields"` } diff --git a/pkg/models/repository_performer.go b/pkg/models/repository_performer.go index 3fd93619011..ad0b61da0f7 100644 --- a/pkg/models/repository_performer.go +++ b/pkg/models/repository_performer.go @@ -43,12 +43,12 @@ type PerformerCounter interface { // PerformerCreator provides methods to create performers. type PerformerCreator interface { - Create(ctx context.Context, newPerformer *Performer) error + Create(ctx context.Context, newPerformer *CreatePerformerInput) error } // PerformerUpdater provides methods to update performers. 
type PerformerUpdater interface { - Update(ctx context.Context, updatedPerformer *Performer) error + Update(ctx context.Context, updatedPerformer *UpdatePerformerInput) error UpdatePartial(ctx context.Context, id int, updatedPerformer PerformerPartial) (*Performer, error) UpdateImage(ctx context.Context, performerID int, image []byte) error } @@ -80,6 +80,8 @@ type PerformerReader interface { TagIDLoader URLLoader + CustomFieldsReader + All(ctx context.Context) ([]*Performer, error) GetImage(ctx context.Context, performerID int) ([]byte, error) HasImage(ctx context.Context, performerID int) (bool, error) diff --git a/pkg/performer/export.go b/pkg/performer/export.go index 8f720338f3d..1455fb7bfa0 100644 --- a/pkg/performer/export.go +++ b/pkg/performer/export.go @@ -17,6 +17,7 @@ type ImageAliasStashIDGetter interface { models.AliasLoader models.StashIDLoader models.URLLoader + models.CustomFieldsReader } // ToJSON converts a Performer object into its JSON equivalent. @@ -87,6 +88,12 @@ func ToJSON(ctx context.Context, reader ImageAliasStashIDGetter, performer *mode newPerformerJSON.StashIDs = performer.StashIDs.List() + var err error + newPerformerJSON.CustomFields, err = reader.GetCustomFields(ctx, performer.ID) + if err != nil { + return nil, fmt.Errorf("getting performer custom fields: %v", err) + } + image, err := reader.GetImage(ctx, performer.ID) if err != nil { logger.Errorf("Error getting performer image: %v", err) diff --git a/pkg/performer/export_test.go b/pkg/performer/export_test.go index 36353b17de7..e51049e1491 100644 --- a/pkg/performer/export_test.go +++ b/pkg/performer/export_test.go @@ -15,9 +15,11 @@ import ( ) const ( - performerID = 1 - noImageID = 2 - errImageID = 3 + performerID = 1 + noImageID = 2 + errImageID = 3 + customFieldsID = 4 + errCustomFieldsID = 5 ) const ( @@ -50,6 +52,11 @@ var ( penisLength = 1.23 circumcisedEnum = models.CircumisedEnumCut circumcised = circumcisedEnum.String() + + emptyCustomFields = 
make(map[string]interface{}) + customFields = map[string]interface{}{ + "customField1": "customValue1", + } ) var imageBytes = []byte("imageBytes") @@ -118,8 +125,8 @@ func createEmptyPerformer(id int) models.Performer { } } -func createFullJSONPerformer(name string, image string) *jsonschema.Performer { - return &jsonschema.Performer{ +func createFullJSONPerformer(name string, image string, withCustomFields bool) *jsonschema.Performer { + ret := &jsonschema.Performer{ Name: name, Disambiguation: disambiguation, URLs: []string{url, twitter, instagram}, @@ -152,7 +159,13 @@ func createFullJSONPerformer(name string, image string) *jsonschema.Performer { Weight: weight, StashIDs: stashIDs, IgnoreAutoTag: autoTagIgnored, + CustomFields: emptyCustomFields, } + + if withCustomFields { + ret.CustomFields = customFields + } + return ret } func createEmptyJSONPerformer() *jsonschema.Performer { @@ -166,13 +179,15 @@ func createEmptyJSONPerformer() *jsonschema.Performer { UpdatedAt: json.JSONTime{ Time: updateTime, }, + CustomFields: emptyCustomFields, } } type testScenario struct { - input models.Performer - expected *jsonschema.Performer - err bool + input models.Performer + customFields map[string]interface{} + expected *jsonschema.Performer + err bool } var scenarios []testScenario @@ -181,20 +196,36 @@ func initTestTable() { scenarios = []testScenario{ { *createFullPerformer(performerID, performerName), - createFullJSONPerformer(performerName, image), + emptyCustomFields, + createFullJSONPerformer(performerName, image, false), + false, + }, + { + *createFullPerformer(customFieldsID, performerName), + customFields, + createFullJSONPerformer(performerName, image, true), false, }, { createEmptyPerformer(noImageID), + emptyCustomFields, createEmptyJSONPerformer(), false, }, { *createFullPerformer(errImageID, performerName), - createFullJSONPerformer(performerName, ""), + emptyCustomFields, + createFullJSONPerformer(performerName, "", false), // failure to get image should 
not cause an error false, }, + { + *createFullPerformer(errCustomFieldsID, performerName), + customFields, + nil, + // failure to get custom fields should cause an error + true, + }, } } @@ -204,11 +235,19 @@ func TestToJSON(t *testing.T) { db := mocks.NewDatabase() imageErr := errors.New("error getting image") + customFieldsErr := errors.New("error getting custom fields") db.Performer.On("GetImage", testCtx, performerID).Return(imageBytes, nil).Once() + db.Performer.On("GetImage", testCtx, customFieldsID).Return(imageBytes, nil).Once() db.Performer.On("GetImage", testCtx, noImageID).Return(nil, nil).Once() db.Performer.On("GetImage", testCtx, errImageID).Return(nil, imageErr).Once() + db.Performer.On("GetCustomFields", testCtx, performerID).Return(emptyCustomFields, nil).Once() + db.Performer.On("GetCustomFields", testCtx, customFieldsID).Return(customFields, nil).Once() + db.Performer.On("GetCustomFields", testCtx, noImageID).Return(emptyCustomFields, nil).Once() + db.Performer.On("GetCustomFields", testCtx, errImageID).Return(emptyCustomFields, nil).Once() + db.Performer.On("GetCustomFields", testCtx, errCustomFieldsID).Return(nil, customFieldsErr).Once() + for i, s := range scenarios { tag := s.input json, err := ToJSON(testCtx, db.Performer, &tag) diff --git a/pkg/performer/import.go b/pkg/performer/import.go index 49a2ce291ae..3aaacdb8b69 100644 --- a/pkg/performer/import.go +++ b/pkg/performer/import.go @@ -25,13 +25,15 @@ type Importer struct { Input jsonschema.Performer MissingRefBehaviour models.ImportMissingRefEnum - ID int - performer models.Performer - imageData []byte + ID int + performer models.Performer + customFields models.CustomFieldMap + imageData []byte } func (i *Importer) PreImport(ctx context.Context) error { i.performer = performerJSONToPerformer(i.Input) + i.customFields = i.Input.CustomFields if err := i.populateTags(ctx); err != nil { return err @@ -165,7 +167,10 @@ func (i *Importer) FindExistingID(ctx context.Context) (*int, error) { } 
func (i *Importer) Create(ctx context.Context) (*int, error) { - err := i.ReaderWriter.Create(ctx, &i.performer) + err := i.ReaderWriter.Create(ctx, &models.CreatePerformerInput{ + Performer: &i.performer, + CustomFields: i.customFields, + }) if err != nil { return nil, fmt.Errorf("error creating performer: %v", err) } @@ -175,9 +180,13 @@ func (i *Importer) Create(ctx context.Context) (*int, error) { } func (i *Importer) Update(ctx context.Context, id int) error { - performer := i.performer - performer.ID = id - err := i.ReaderWriter.Update(ctx, &performer) + i.performer.ID = id + err := i.ReaderWriter.Update(ctx, &models.UpdatePerformerInput{ + Performer: &i.performer, + CustomFields: models.CustomFieldsInput{ + Full: i.customFields, + }, + }) if err != nil { return fmt.Errorf("error updating existing performer: %v", err) } diff --git a/pkg/performer/import_test.go b/pkg/performer/import_test.go index 1ee569892d4..0a3f862914a 100644 --- a/pkg/performer/import_test.go +++ b/pkg/performer/import_test.go @@ -53,13 +53,14 @@ func TestImporterPreImport(t *testing.T) { assert.NotNil(t, err) - i.Input = *createFullJSONPerformer(performerName, image) + i.Input = *createFullJSONPerformer(performerName, image, true) err = i.PreImport(testCtx) assert.Nil(t, err) expectedPerformer := *createFullPerformer(0, performerName) assert.Equal(t, expectedPerformer, i.performer) + assert.Equal(t, models.CustomFieldMap(customFields), i.customFields) } func TestImporterPreImportWithTag(t *testing.T) { @@ -234,10 +235,18 @@ func TestCreate(t *testing.T) { Name: performerName, } + performerInput := models.CreatePerformerInput{ + Performer: &performer, + } + performerErr := models.Performer{ Name: performerNameErr, } + performerErrInput := models.CreatePerformerInput{ + Performer: &performerErr, + } + i := Importer{ ReaderWriter: db.Performer, TagWriter: db.Tag, @@ -245,11 +254,11 @@ func TestCreate(t *testing.T) { } errCreate := errors.New("Create error") - db.Performer.On("Create", 
testCtx, &performer).Run(func(args mock.Arguments) { - arg := args.Get(1).(*models.Performer) + db.Performer.On("Create", testCtx, &performerInput).Run(func(args mock.Arguments) { + arg := args.Get(1).(*models.CreatePerformerInput) arg.ID = performerID }).Return(nil).Once() - db.Performer.On("Create", testCtx, &performerErr).Return(errCreate).Once() + db.Performer.On("Create", testCtx, &performerErrInput).Return(errCreate).Once() id, err := i.Create(testCtx) assert.Equal(t, performerID, *id) @@ -284,7 +293,10 @@ func TestUpdate(t *testing.T) { // id needs to be set for the mock input performer.ID = performerID - db.Performer.On("Update", testCtx, &performer).Return(nil).Once() + performerInput := models.UpdatePerformerInput{ + Performer: &performer, + } + db.Performer.On("Update", testCtx, &performerInput).Return(nil).Once() err := i.Update(testCtx, performerID) assert.Nil(t, err) @@ -293,7 +305,10 @@ func TestUpdate(t *testing.T) { // need to set id separately performerErr.ID = errImageID - db.Performer.On("Update", testCtx, &performerErr).Return(errUpdate).Once() + performerErrInput := models.UpdatePerformerInput{ + Performer: &performerErr, + } + db.Performer.On("Update", testCtx, &performerErrInput).Return(errUpdate).Once() err = i.Update(testCtx, errImageID) assert.NotNil(t, err) diff --git a/pkg/scene/import.go b/pkg/scene/import.go index c1b065bcf8a..e1248a77c3d 100644 --- a/pkg/scene/import.go +++ b/pkg/scene/import.go @@ -325,7 +325,9 @@ func (i *Importer) createPerformers(ctx context.Context, names []string) ([]*mod newPerformer := models.NewPerformer() newPerformer.Name = name - err := i.PerformerWriter.Create(ctx, &newPerformer) + err := i.PerformerWriter.Create(ctx, &models.CreatePerformerInput{ + Performer: &newPerformer, + }) if err != nil { return nil, err } diff --git a/pkg/scene/import_test.go b/pkg/scene/import_test.go index 0e37dce16db..a6e3edcdfdb 100644 --- a/pkg/scene/import_test.go +++ b/pkg/scene/import_test.go @@ -327,8 +327,8 @@ func 
TestImporterPreImportWithMissingPerformer(t *testing.T) { } db.Performer.On("FindByNames", testCtx, []string{missingPerformerName}, false).Return(nil, nil).Times(3) - db.Performer.On("Create", testCtx, mock.AnythingOfType("*models.Performer")).Run(func(args mock.Arguments) { - p := args.Get(1).(*models.Performer) + db.Performer.On("Create", testCtx, mock.AnythingOfType("*models.CreatePerformerInput")).Run(func(args mock.Arguments) { + p := args.Get(1).(*models.CreatePerformerInput) p.ID = existingPerformerID }).Return(nil) @@ -361,7 +361,7 @@ func TestImporterPreImportWithMissingPerformerCreateErr(t *testing.T) { } db.Performer.On("FindByNames", testCtx, []string{missingPerformerName}, false).Return(nil, nil).Once() - db.Performer.On("Create", testCtx, mock.AnythingOfType("*models.Performer")).Return(errors.New("Create error")) + db.Performer.On("Create", testCtx, mock.AnythingOfType("*models.CreatePerformerInput")).Return(errors.New("Create error")) err := i.PreImport(testCtx) assert.NotNil(t, err) diff --git a/pkg/sqlite/anonymise.go b/pkg/sqlite/anonymise.go index 519489abfc6..f30779fd3bc 100644 --- a/pkg/sqlite/anonymise.go +++ b/pkg/sqlite/anonymise.go @@ -600,6 +600,10 @@ func (db *Anonymiser) anonymisePerformers(ctx context.Context) error { return err } + if err := db.anonymiseCustomFields(ctx, goqu.T(performersCustomFieldsTable.GetTable()), "performer_id"); err != nil { + return err + } + return nil } @@ -1050,3 +1054,73 @@ func (db *Anonymiser) obfuscateString(in string, dict string) string { return out.String() } + +func (db *Anonymiser) anonymiseCustomFields(ctx context.Context, table exp.IdentifierExpression, idColumn string) error { + lastID := 0 + lastField := "" + total := 0 + const logEvery = 10000 + + for gotSome := true; gotSome; { + if err := txn.WithTxn(ctx, db, func(ctx context.Context) error { + query := dialect.From(table).Select( + table.Col(idColumn), + table.Col("field"), + table.Col("value"), + ).Where( + goqu.L("("+idColumn+", 
field)").Gt(goqu.L("(?, ?)", lastID, lastField)), + ).Order( + table.Col(idColumn).Asc(), table.Col("field").Asc(), + ).Limit(1000) + + gotSome = false + + const single = false + return queryFunc(ctx, query, single, func(rows *sqlx.Rows) error { + var ( + id int + field string + value string + ) + + if err := rows.Scan( + &id, + &field, + &value, + ); err != nil { + return err + } + + set := goqu.Record{} + set["field"] = db.obfuscateString(field, letters) + set["value"] = db.obfuscateString(value, letters) + + if len(set) > 0 { + stmt := dialect.Update(table).Set(set).Where( + table.Col(idColumn).Eq(id), + table.Col("field").Eq(field), + ) + + if _, err := exec(ctx, stmt); err != nil { + return fmt.Errorf("anonymising %s: %w", table.GetTable(), err) + } + } + + lastID = id + lastField = field + gotSome = true + total++ + + if total%logEvery == 0 { + logger.Infof("Anonymised %d %s custom fields", total, table.GetTable()) + } + + return nil + }) + }); err != nil { + return err + } + } + + return nil +} diff --git a/pkg/sqlite/custom_fields.go b/pkg/sqlite/custom_fields.go new file mode 100644 index 00000000000..bac6ae5e17f --- /dev/null +++ b/pkg/sqlite/custom_fields.go @@ -0,0 +1,308 @@ +package sqlite + +import ( + "context" + "fmt" + "regexp" + "strings" + + "github.com/doug-martin/goqu/v9" + "github.com/doug-martin/goqu/v9/exp" + "github.com/jmoiron/sqlx" + "github.com/stashapp/stash/pkg/models" +) + +const maxCustomFieldNameLength = 64 + +type customFieldsStore struct { + table exp.IdentifierExpression + fk exp.IdentifierExpression +} + +func (s *customFieldsStore) deleteForID(ctx context.Context, id int) error { + table := s.table + q := dialect.Delete(table).Where(s.fk.Eq(id)) + _, err := exec(ctx, q) + if err != nil { + return fmt.Errorf("deleting from %s: %w", s.table.GetTable(), err) + } + + return nil +} + +func (s *customFieldsStore) SetCustomFields(ctx context.Context, id int, values models.CustomFieldsInput) error { + var partial bool + var valMap 
map[string]interface{} + + switch { + case values.Full != nil: + partial = false + valMap = values.Full + case values.Partial != nil: + partial = true + valMap = values.Partial + default: + return nil + } + + if err := s.validateCustomFields(valMap); err != nil { + return err + } + + return s.setCustomFields(ctx, id, valMap, partial) +} + +func (s *customFieldsStore) validateCustomFields(values map[string]interface{}) error { + // ensure that custom field names are valid + // no leading or trailing whitespace, no empty strings + for k := range values { + if err := s.validateCustomFieldName(k); err != nil { + return fmt.Errorf("custom field name %q: %w", k, err) + } + } + + return nil +} + +func (s *customFieldsStore) validateCustomFieldName(fieldName string) error { + // ensure that custom field names are valid + // no leading or trailing whitespace, no empty strings + if strings.TrimSpace(fieldName) == "" { + return fmt.Errorf("custom field name cannot be empty") + } + if fieldName != strings.TrimSpace(fieldName) { + return fmt.Errorf("custom field name cannot have leading or trailing whitespace") + } + if len(fieldName) > maxCustomFieldNameLength { + return fmt.Errorf("custom field name must be less than %d characters", maxCustomFieldNameLength+1) + } + return nil +} + +func getSQLValueFromCustomFieldInput(input interface{}) (interface{}, error) { + switch v := input.(type) { + case []interface{}, map[string]interface{}: + // TODO - in future it would be nice to convert to a JSON string + // however, we would need some way to differentiate between a JSON string and a regular string + // for now, we will not support objects and arrays + return nil, fmt.Errorf("unsupported custom field value type: %T", input) + default: + return v, nil + } +} + +func (s *customFieldsStore) sqlValueToValue(value interface{}) interface{} { + // TODO - if we ever support objects and arrays we will need to add support here + return value +} + +func (s *customFieldsStore) 
setCustomFields(ctx context.Context, id int, values map[string]interface{}, partial bool) error { + if !partial { + // delete existing custom fields + if err := s.deleteForID(ctx, id); err != nil { + return err + } + } + + if len(values) == 0 { + return nil + } + + conflictKey := s.fk.GetCol().(string) + ", field" + // upsert new custom fields + q := dialect.Insert(s.table).Prepared(true).Cols(s.fk, "field", "value"). + OnConflict(goqu.DoUpdate(conflictKey, goqu.Record{"value": goqu.I("excluded.value")})) + r := make([]interface{}, len(values)) + var i int + for key, value := range values { + v, err := getSQLValueFromCustomFieldInput(value) + if err != nil { + return fmt.Errorf("getting SQL value for field %q: %w", key, err) + } + r[i] = goqu.Record{"field": key, "value": v, s.fk.GetCol().(string): id} + i++ + } + + if _, err := exec(ctx, q.Rows(r...)); err != nil { + return fmt.Errorf("inserting custom fields: %w", err) + } + + return nil +} + +func (s *customFieldsStore) GetCustomFields(ctx context.Context, id int) (map[string]interface{}, error) { + q := dialect.Select("field", "value").From(s.table).Where(s.fk.Eq(id)) + + const single = false + ret := make(map[string]interface{}) + err := queryFunc(ctx, q, single, func(rows *sqlx.Rows) error { + var field string + var value interface{} + if err := rows.Scan(&field, &value); err != nil { + return fmt.Errorf("scanning custom fields: %w", err) + } + ret[field] = s.sqlValueToValue(value) + return nil + }) + if err != nil { + return nil, fmt.Errorf("getting custom fields: %w", err) + } + + return ret, nil +} + +func (s *customFieldsStore) GetCustomFieldsBulk(ctx context.Context, ids []int) ([]models.CustomFieldMap, error) { + q := dialect.Select(s.fk.As("id"), "field", "value").From(s.table).Where(s.fk.In(ids)) + + const single = false + ret := make([]models.CustomFieldMap, len(ids)) + + idi := make(map[int]int, len(ids)) + for i, id := range ids { + idi[id] = i + } + + err := queryFunc(ctx, q, single, func(rows 
*sqlx.Rows) error { + var id int + var field string + var value interface{} + if err := rows.Scan(&id, &field, &value); err != nil { + return fmt.Errorf("scanning custom fields: %w", err) + } + + i := idi[id] + m := ret[i] + if m == nil { + m = make(map[string]interface{}) + ret[i] = m + } + + m[field] = s.sqlValueToValue(value) + return nil + }) + if err != nil { + return nil, fmt.Errorf("getting custom fields: %w", err) + } + + return ret, nil +} + +type customFieldsFilterHandler struct { + table string + fkCol string + c []models.CustomFieldCriterionInput + idCol string +} + +func (h *customFieldsFilterHandler) innerJoin(f *filterBuilder, as string, field string) { + joinOn := fmt.Sprintf("%s = %s.%s AND %s.field = ?", h.idCol, as, h.fkCol, as) + f.addInnerJoin(h.table, as, joinOn, field) +} + +func (h *customFieldsFilterHandler) leftJoin(f *filterBuilder, as string, field string) { + joinOn := fmt.Sprintf("%s = %s.%s AND %s.field = ?", h.idCol, as, h.fkCol, as) + f.addLeftJoin(h.table, as, joinOn, field) +} + +func (h *customFieldsFilterHandler) handleCriterion(f *filterBuilder, joinAs string, cc models.CustomFieldCriterionInput) { + // convert values + cv := make([]interface{}, len(cc.Value)) + for i, v := range cc.Value { + var err error + cv[i], err = getSQLValueFromCustomFieldInput(v) + if err != nil { + f.setError(err) + return + } + } + + switch cc.Modifier { + case models.CriterionModifierEquals: + h.innerJoin(f, joinAs, cc.Field) + f.addWhere(fmt.Sprintf("%[1]s.value IN %s", joinAs, getInBinding(len(cv))), cv...) + case models.CriterionModifierNotEquals: + h.innerJoin(f, joinAs, cc.Field) + f.addWhere(fmt.Sprintf("%[1]s.value NOT IN %s", joinAs, getInBinding(len(cv))), cv...) 
+ case models.CriterionModifierIncludes: + clauses := make([]sqlClause, len(cv)) + for i, v := range cv { + clauses[i] = makeClause(fmt.Sprintf("%s.value LIKE ?", joinAs), fmt.Sprintf("%%%v%%", v)) + } + h.innerJoin(f, joinAs, cc.Field) + f.whereClauses = append(f.whereClauses, clauses...) + case models.CriterionModifierExcludes: + for _, v := range cv { + f.addWhere(fmt.Sprintf("%[1]s.value NOT LIKE ?", joinAs), fmt.Sprintf("%%%v%%", v)) + } + h.leftJoin(f, joinAs, cc.Field) + case models.CriterionModifierMatchesRegex: + for _, v := range cv { + vs, ok := v.(string) + if !ok { + f.setError(fmt.Errorf("unsupported custom field criterion value type: %T", v)) + } + if _, err := regexp.Compile(vs); err != nil { + f.setError(err) + return + } + f.addWhere(fmt.Sprintf("(%s.value regexp ?)", joinAs), v) + } + h.innerJoin(f, joinAs, cc.Field) + case models.CriterionModifierNotMatchesRegex: + for _, v := range cv { + vs, ok := v.(string) + if !ok { + f.setError(fmt.Errorf("unsupported custom field criterion value type: %T", v)) + } + if _, err := regexp.Compile(vs); err != nil { + f.setError(err) + return + } + f.addWhere(fmt.Sprintf("(%s.value IS NULL OR %[1]s.value NOT regexp ?)", joinAs), v) + } + h.leftJoin(f, joinAs, cc.Field) + case models.CriterionModifierIsNull: + h.leftJoin(f, joinAs, cc.Field) + f.addWhere(fmt.Sprintf("%s.value IS NULL OR TRIM(%[1]s.value) = ''", joinAs)) + case models.CriterionModifierNotNull: + h.innerJoin(f, joinAs, cc.Field) + f.addWhere(fmt.Sprintf("TRIM(%[1]s.value) != ''", joinAs)) + case models.CriterionModifierBetween: + if len(cv) != 2 { + f.setError(fmt.Errorf("expected 2 values for custom field criterion modifier BETWEEN, got %d", len(cv))) + return + } + h.innerJoin(f, joinAs, cc.Field) + f.addWhere(fmt.Sprintf("%s.value BETWEEN ? AND ?", joinAs), cv[0], cv[1]) + case models.CriterionModifierNotBetween: + h.innerJoin(f, joinAs, cc.Field) + f.addWhere(fmt.Sprintf("%s.value NOT BETWEEN ? 
AND ?", joinAs), cv[0], cv[1]) + case models.CriterionModifierLessThan: + if len(cv) != 1 { + f.setError(fmt.Errorf("expected 1 value for custom field criterion modifier LESS_THAN, got %d", len(cv))) + return + } + h.innerJoin(f, joinAs, cc.Field) + f.addWhere(fmt.Sprintf("%s.value < ?", joinAs), cv[0]) + case models.CriterionModifierGreaterThan: + if len(cv) != 1 { + f.setError(fmt.Errorf("expected 1 value for custom field criterion modifier GREATER_THAN, got %d", len(cv))) + return + } + h.innerJoin(f, joinAs, cc.Field) + f.addWhere(fmt.Sprintf("%s.value > ?", joinAs), cv[0]) + default: + f.setError(fmt.Errorf("unsupported custom field criterion modifier: %s", cc.Modifier)) + } +} + +func (h *customFieldsFilterHandler) handle(ctx context.Context, f *filterBuilder) { + if len(h.c) == 0 { + return + } + + for i, cc := range h.c { + join := fmt.Sprintf("custom_fields_%d", i) + h.handleCriterion(f, join, cc) + } +} diff --git a/pkg/sqlite/custom_fields_test.go b/pkg/sqlite/custom_fields_test.go new file mode 100644 index 00000000000..ce5c77487d9 --- /dev/null +++ b/pkg/sqlite/custom_fields_test.go @@ -0,0 +1,176 @@ +//go:build integration +// +build integration + +package sqlite_test + +import ( + "context" + "testing" + + "github.com/stashapp/stash/pkg/models" + "github.com/stretchr/testify/assert" +) + +func TestSetCustomFields(t *testing.T) { + performerIdx := performerIdx1WithScene + + mergeCustomFields := func(i map[string]interface{}) map[string]interface{} { + m := getPerformerCustomFields(performerIdx) + for k, v := range i { + m[k] = v + } + return m + } + + tests := []struct { + name string + input models.CustomFieldsInput + expected map[string]interface{} + wantErr bool + }{ + { + "valid full", + models.CustomFieldsInput{ + Full: map[string]interface{}{ + "key": "value", + }, + }, + map[string]interface{}{ + "key": "value", + }, + false, + }, + { + "valid partial", + models.CustomFieldsInput{ + Partial: map[string]interface{}{ + "key": "value", + }, + }, + 
mergeCustomFields(map[string]interface{}{ + "key": "value", + }), + false, + }, + { + "valid partial overwrite", + models.CustomFieldsInput{ + Partial: map[string]interface{}{ + "real": float64(4.56), + }, + }, + mergeCustomFields(map[string]interface{}{ + "real": float64(4.56), + }), + false, + }, + { + "leading space full", + models.CustomFieldsInput{ + Full: map[string]interface{}{ + " key": "value", + }, + }, + nil, + true, + }, + { + "trailing space full", + models.CustomFieldsInput{ + Full: map[string]interface{}{ + "key ": "value", + }, + }, + nil, + true, + }, + { + "leading space partial", + models.CustomFieldsInput{ + Partial: map[string]interface{}{ + " key": "value", + }, + }, + nil, + true, + }, + { + "trailing space partial", + models.CustomFieldsInput{ + Partial: map[string]interface{}{ + "key ": "value", + }, + }, + nil, + true, + }, + { + "big key full", + models.CustomFieldsInput{ + Full: map[string]interface{}{ + "12345678901234567890123456789012345678901234567890123456789012345": "value", + }, + }, + nil, + true, + }, + { + "big key partial", + models.CustomFieldsInput{ + Partial: map[string]interface{}{ + "12345678901234567890123456789012345678901234567890123456789012345": "value", + }, + }, + nil, + true, + }, + { + "empty key full", + models.CustomFieldsInput{ + Full: map[string]interface{}{ + "": "value", + }, + }, + nil, + true, + }, + { + "empty key partial", + models.CustomFieldsInput{ + Partial: map[string]interface{}{ + "": "value", + }, + }, + nil, + true, + }, + } + + // use performer custom fields store + store := db.Performer + id := performerIDs[performerIdx] + + assert := assert.New(t) + + for _, tt := range tests { + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + err := store.SetCustomFields(ctx, id, tt.input) + if (err != nil) != tt.wantErr { + t.Errorf("SetCustomFields() error = %v, wantErr %v", err, tt.wantErr) + return + } + + if tt.wantErr { + return + } + + actual, err := 
store.GetCustomFields(ctx, id) + if err != nil { + t.Errorf("GetCustomFields() error = %v", err) + return + } + + assert.Equal(tt.expected, actual) + }) + } +} diff --git a/pkg/sqlite/database.go b/pkg/sqlite/database.go index d2c0a8191e5..5ed803c1753 100644 --- a/pkg/sqlite/database.go +++ b/pkg/sqlite/database.go @@ -34,7 +34,7 @@ const ( cacheSizeEnv = "STASH_SQLITE_CACHE_SIZE" ) -var appSchemaVersion uint = 70 +var appSchemaVersion uint = 71 //go:embed migrations/*.sql var migrationsBox embed.FS diff --git a/pkg/sqlite/filter.go b/pkg/sqlite/filter.go index f4b5e7e7726..143487af235 100644 --- a/pkg/sqlite/filter.go +++ b/pkg/sqlite/filter.go @@ -95,6 +95,7 @@ type join struct { as string onClause string joinType string + args []interface{} } // equals returns true if the other join alias/table is equal to this one @@ -229,12 +230,13 @@ func (f *filterBuilder) not(n *filterBuilder) { // The AS is omitted if as is empty. // This method does not add a join if it its alias/table name is already // present in another existing join. -func (f *filterBuilder) addLeftJoin(table, as, onClause string) { +func (f *filterBuilder) addLeftJoin(table, as, onClause string, args ...interface{}) { newJoin := join{ table: table, as: as, onClause: onClause, joinType: "LEFT", + args: args, } f.joins.add(newJoin) @@ -245,12 +247,13 @@ func (f *filterBuilder) addLeftJoin(table, as, onClause string) { // The AS is omitted if as is empty. // This method does not add a join if it its alias/table name is already // present in another existing join. 
-func (f *filterBuilder) addInnerJoin(table, as, onClause string) { +func (f *filterBuilder) addInnerJoin(table, as, onClause string, args ...interface{}) { newJoin := join{ table: table, as: as, onClause: onClause, joinType: "INNER", + args: args, } f.joins.add(newJoin) diff --git a/pkg/sqlite/migrations/71_custom_fields.up.sql b/pkg/sqlite/migrations/71_custom_fields.up.sql new file mode 100644 index 00000000000..3440c20b13f --- /dev/null +++ b/pkg/sqlite/migrations/71_custom_fields.up.sql @@ -0,0 +1,9 @@ +CREATE TABLE `performer_custom_fields` ( + `performer_id` integer NOT NULL, + `field` varchar(64) NOT NULL, + `value` BLOB NOT NULL, + PRIMARY KEY (`performer_id`, `field`), + foreign key(`performer_id`) references `performers`(`id`) on delete CASCADE +); + +CREATE INDEX `index_performer_custom_fields_field_value` ON `performer_custom_fields` (`field`, `value`); diff --git a/pkg/sqlite/performer.go b/pkg/sqlite/performer.go index e20dc9c4cc5..e291078b204 100644 --- a/pkg/sqlite/performer.go +++ b/pkg/sqlite/performer.go @@ -226,6 +226,7 @@ var ( type PerformerStore struct { blobJoinQueryBuilder + customFieldsStore tableMgr *table } @@ -236,6 +237,10 @@ func NewPerformerStore(blobStore *BlobStore) *PerformerStore { blobStore: blobStore, joinTable: performerTable, }, + customFieldsStore: customFieldsStore{ + table: performersCustomFieldsTable, + fk: performersCustomFieldsTable.Col(performerIDColumn), + }, tableMgr: performerTableMgr, } } @@ -248,9 +253,9 @@ func (qb *PerformerStore) selectDataset() *goqu.SelectDataset { return dialect.From(qb.table()).Select(qb.table().All()) } -func (qb *PerformerStore) Create(ctx context.Context, newObject *models.Performer) error { +func (qb *PerformerStore) Create(ctx context.Context, newObject *models.CreatePerformerInput) error { var r performerRow - r.fromPerformer(*newObject) + r.fromPerformer(*newObject.Performer) id, err := qb.tableMgr.insertID(ctx, r) if err != nil { @@ -282,12 +287,17 @@ func (qb *PerformerStore) 
Create(ctx context.Context, newObject *models.Performe } } + const partial = false + if err := qb.setCustomFields(ctx, id, newObject.CustomFields, partial); err != nil { + return err + } + updated, err := qb.find(ctx, id) if err != nil { return fmt.Errorf("finding after create: %w", err) } - *newObject = *updated + *newObject.Performer = *updated return nil } @@ -330,12 +340,16 @@ func (qb *PerformerStore) UpdatePartial(ctx context.Context, id int, partial mod } } + if err := qb.SetCustomFields(ctx, id, partial.CustomFields); err != nil { + return nil, err + } + return qb.find(ctx, id) } -func (qb *PerformerStore) Update(ctx context.Context, updatedObject *models.Performer) error { +func (qb *PerformerStore) Update(ctx context.Context, updatedObject *models.UpdatePerformerInput) error { var r performerRow - r.fromPerformer(*updatedObject) + r.fromPerformer(*updatedObject.Performer) if err := qb.tableMgr.updateByID(ctx, updatedObject.ID, r); err != nil { return err @@ -365,6 +379,10 @@ func (qb *PerformerStore) Update(ctx context.Context, updatedObject *models.Perf } } + if err := qb.SetCustomFields(ctx, updatedObject.ID, updatedObject.CustomFields); err != nil { + return err + } + return nil } diff --git a/pkg/sqlite/performer_filter.go b/pkg/sqlite/performer_filter.go index 72990a7febd..ae882c9503e 100644 --- a/pkg/sqlite/performer_filter.go +++ b/pkg/sqlite/performer_filter.go @@ -203,6 +203,13 @@ func (qb *performerFilterHandler) criterionHandler() criterionHandler { performerRepository.tags.innerJoin(f, "performer_tag", "performers.id") }, }, + + &customFieldsFilterHandler{ + table: performersCustomFieldsTable.GetTable(), + fkCol: performerIDColumn, + c: filter.CustomFields, + idCol: "performers.id", + }, } } diff --git a/pkg/sqlite/performer_test.go b/pkg/sqlite/performer_test.go index e0294f3e442..d24b4ca4e6a 100644 --- a/pkg/sqlite/performer_test.go +++ b/pkg/sqlite/performer_test.go @@ -16,6 +16,12 @@ import ( "github.com/stretchr/testify/assert" ) +var 
testCustomFields = map[string]interface{}{ + "string": "aaa", + "int": int64(123), // int64 to match the type of the field in the database + "real": 1.23, +} + func loadPerformerRelationships(ctx context.Context, expected models.Performer, actual *models.Performer) error { if expected.Aliases.Loaded() { if err := actual.LoadAliases(ctx, db.Performer); err != nil { @@ -81,57 +87,62 @@ func Test_PerformerStore_Create(t *testing.T) { tests := []struct { name string - newObject models.Performer + newObject models.CreatePerformerInput wantErr bool }{ { "full", - models.Performer{ - Name: name, - Disambiguation: disambiguation, - Gender: &gender, - URLs: models.NewRelatedStrings(urls), - Birthdate: &birthdate, - Ethnicity: ethnicity, - Country: country, - EyeColor: eyeColor, - Height: &height, - Measurements: measurements, - FakeTits: fakeTits, - PenisLength: &penisLength, - Circumcised: &circumcised, - CareerLength: careerLength, - Tattoos: tattoos, - Piercings: piercings, - Favorite: favorite, - Rating: &rating, - Details: details, - DeathDate: &deathdate, - HairColor: hairColor, - Weight: &weight, - IgnoreAutoTag: ignoreAutoTag, - TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithPerformer], tagIDs[tagIdx1WithDupName]}), - Aliases: models.NewRelatedStrings(aliases), - StashIDs: models.NewRelatedStashIDs([]models.StashID{ - { - StashID: stashID1, - Endpoint: endpoint1, - }, - { - StashID: stashID2, - Endpoint: endpoint2, - }, - }), - CreatedAt: createdAt, - UpdatedAt: updatedAt, + models.CreatePerformerInput{ + Performer: &models.Performer{ + Name: name, + Disambiguation: disambiguation, + Gender: &gender, + URLs: models.NewRelatedStrings(urls), + Birthdate: &birthdate, + Ethnicity: ethnicity, + Country: country, + EyeColor: eyeColor, + Height: &height, + Measurements: measurements, + FakeTits: fakeTits, + PenisLength: &penisLength, + Circumcised: &circumcised, + CareerLength: careerLength, + Tattoos: tattoos, + Piercings: piercings, + Favorite: favorite, + Rating: 
&rating, + Details: details, + DeathDate: &deathdate, + HairColor: hairColor, + Weight: &weight, + IgnoreAutoTag: ignoreAutoTag, + TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithPerformer], tagIDs[tagIdx1WithDupName]}), + Aliases: models.NewRelatedStrings(aliases), + StashIDs: models.NewRelatedStashIDs([]models.StashID{ + { + StashID: stashID1, + Endpoint: endpoint1, + }, + { + StashID: stashID2, + Endpoint: endpoint2, + }, + }), + CreatedAt: createdAt, + UpdatedAt: updatedAt, + }, + CustomFields: testCustomFields, }, false, }, { "invalid tag id", - models.Performer{ - Name: name, - TagIDs: models.NewRelatedIDs([]int{invalidID}), + models.CreatePerformerInput{ + Performer: &models.Performer{ + Name: name, + TagIDs: models.NewRelatedIDs([]int{invalidID}), + }, }, true, }, @@ -155,16 +166,16 @@ func Test_PerformerStore_Create(t *testing.T) { assert.NotZero(p.ID) - copy := tt.newObject + copy := *tt.newObject.Performer copy.ID = p.ID // load relationships - if err := loadPerformerRelationships(ctx, copy, &p); err != nil { + if err := loadPerformerRelationships(ctx, copy, p.Performer); err != nil { t.Errorf("loadPerformerRelationships() error = %v", err) return } - assert.Equal(copy, p) + assert.Equal(copy, *p.Performer) // ensure can find the performer found, err := qb.Find(ctx, p.ID) @@ -183,6 +194,15 @@ func Test_PerformerStore_Create(t *testing.T) { } assert.Equal(copy, *found) + // ensure custom fields are set + cf, err := qb.GetCustomFields(ctx, p.ID) + if err != nil { + t.Errorf("PerformerStore.GetCustomFields() error = %v", err) + return + } + + assert.Equal(tt.newObject.CustomFields, cf) + return }) } @@ -228,77 +248,109 @@ func Test_PerformerStore_Update(t *testing.T) { tests := []struct { name string - updatedObject *models.Performer + updatedObject models.UpdatePerformerInput wantErr bool }{ { "full", - &models.Performer{ - ID: performerIDs[performerIdxWithGallery], - Name: name, - Disambiguation: disambiguation, - Gender: &gender, - URLs: 
models.NewRelatedStrings(urls), - Birthdate: &birthdate, - Ethnicity: ethnicity, - Country: country, - EyeColor: eyeColor, - Height: &height, - Measurements: measurements, - FakeTits: fakeTits, - PenisLength: &penisLength, - Circumcised: &circumcised, - CareerLength: careerLength, - Tattoos: tattoos, - Piercings: piercings, - Favorite: favorite, - Rating: &rating, - Details: details, - DeathDate: &deathdate, - HairColor: hairColor, - Weight: &weight, - IgnoreAutoTag: ignoreAutoTag, - Aliases: models.NewRelatedStrings(aliases), - TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithPerformer], tagIDs[tagIdx1WithDupName]}), - StashIDs: models.NewRelatedStashIDs([]models.StashID{ - { - StashID: stashID1, - Endpoint: endpoint1, - }, - { - StashID: stashID2, - Endpoint: endpoint2, - }, - }), - CreatedAt: createdAt, - UpdatedAt: updatedAt, + models.UpdatePerformerInput{ + Performer: &models.Performer{ + ID: performerIDs[performerIdxWithGallery], + Name: name, + Disambiguation: disambiguation, + Gender: &gender, + URLs: models.NewRelatedStrings(urls), + Birthdate: &birthdate, + Ethnicity: ethnicity, + Country: country, + EyeColor: eyeColor, + Height: &height, + Measurements: measurements, + FakeTits: fakeTits, + PenisLength: &penisLength, + Circumcised: &circumcised, + CareerLength: careerLength, + Tattoos: tattoos, + Piercings: piercings, + Favorite: favorite, + Rating: &rating, + Details: details, + DeathDate: &deathdate, + HairColor: hairColor, + Weight: &weight, + IgnoreAutoTag: ignoreAutoTag, + Aliases: models.NewRelatedStrings(aliases), + TagIDs: models.NewRelatedIDs([]int{tagIDs[tagIdx1WithPerformer], tagIDs[tagIdx1WithDupName]}), + StashIDs: models.NewRelatedStashIDs([]models.StashID{ + { + StashID: stashID1, + Endpoint: endpoint1, + }, + { + StashID: stashID2, + Endpoint: endpoint2, + }, + }), + CreatedAt: createdAt, + UpdatedAt: updatedAt, + }, }, false, }, { "clear nullables", - &models.Performer{ - ID: performerIDs[performerIdxWithGallery], - Aliases: 
models.NewRelatedStrings([]string{}), - URLs: models.NewRelatedStrings([]string{}), - TagIDs: models.NewRelatedIDs([]int{}), - StashIDs: models.NewRelatedStashIDs([]models.StashID{}), + models.UpdatePerformerInput{ + Performer: &models.Performer{ + ID: performerIDs[performerIdxWithGallery], + Aliases: models.NewRelatedStrings([]string{}), + URLs: models.NewRelatedStrings([]string{}), + TagIDs: models.NewRelatedIDs([]int{}), + StashIDs: models.NewRelatedStashIDs([]models.StashID{}), + }, }, false, }, { "clear tag ids", - &models.Performer{ - ID: performerIDs[sceneIdxWithTag], - TagIDs: models.NewRelatedIDs([]int{}), + models.UpdatePerformerInput{ + Performer: &models.Performer{ + ID: performerIDs[sceneIdxWithTag], + TagIDs: models.NewRelatedIDs([]int{}), + }, + }, + false, + }, + { + "set custom fields", + models.UpdatePerformerInput{ + Performer: &models.Performer{ + ID: performerIDs[performerIdxWithGallery], + }, + CustomFields: models.CustomFieldsInput{ + Full: testCustomFields, + }, + }, + false, + }, + { + "clear custom fields", + models.UpdatePerformerInput{ + Performer: &models.Performer{ + ID: performerIDs[performerIdxWithGallery], + }, + CustomFields: models.CustomFieldsInput{ + Full: map[string]interface{}{}, + }, }, false, }, { "invalid tag id", - &models.Performer{ - ID: performerIDs[sceneIdxWithGallery], - TagIDs: models.NewRelatedIDs([]int{invalidID}), + models.UpdatePerformerInput{ + Performer: &models.Performer{ + ID: performerIDs[sceneIdxWithGallery], + TagIDs: models.NewRelatedIDs([]int{invalidID}), + }, }, true, }, @@ -309,9 +361,9 @@ func Test_PerformerStore_Update(t *testing.T) { runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { assert := assert.New(t) - copy := *tt.updatedObject + copy := *tt.updatedObject.Performer - if err := qb.Update(ctx, tt.updatedObject); (err != nil) != tt.wantErr { + if err := qb.Update(ctx, &tt.updatedObject); (err != nil) != tt.wantErr { t.Errorf("PerformerStore.Update() error = %v, wantErr %v", 
err, tt.wantErr) } @@ -331,6 +383,17 @@ func Test_PerformerStore_Update(t *testing.T) { } assert.Equal(copy, *s) + + // ensure custom fields are correct + if tt.updatedObject.CustomFields.Full != nil { + cf, err := qb.GetCustomFields(ctx, tt.updatedObject.ID) + if err != nil { + t.Errorf("PerformerStore.GetCustomFields() error = %v", err) + return + } + + assert.Equal(tt.updatedObject.CustomFields.Full, cf) + } }) } } @@ -573,6 +636,79 @@ func Test_PerformerStore_UpdatePartial(t *testing.T) { } } +func Test_PerformerStore_UpdatePartialCustomFields(t *testing.T) { + tests := []struct { + name string + id int + partial models.PerformerPartial + expected map[string]interface{} // nil to use the partial + }{ + { + "set custom fields", + performerIDs[performerIdxWithGallery], + models.PerformerPartial{ + CustomFields: models.CustomFieldsInput{ + Full: testCustomFields, + }, + }, + nil, + }, + { + "clear custom fields", + performerIDs[performerIdxWithGallery], + models.PerformerPartial{ + CustomFields: models.CustomFieldsInput{ + Full: map[string]interface{}{}, + }, + }, + nil, + }, + { + "partial custom fields", + performerIDs[performerIdxWithGallery], + models.PerformerPartial{ + CustomFields: models.CustomFieldsInput{ + Partial: map[string]interface{}{ + "string": "bbb", + "new_field": "new", + }, + }, + }, + map[string]interface{}{ + "int": int64(3), + "real": 1.3, + "string": "bbb", + "new_field": "new", + }, + }, + } + for _, tt := range tests { + qb := db.Performer + + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + assert := assert.New(t) + + _, err := qb.UpdatePartial(ctx, tt.id, tt.partial) + if err != nil { + t.Errorf("PerformerStore.UpdatePartial() error = %v", err) + return + } + + // ensure custom fields are correct + cf, err := qb.GetCustomFields(ctx, tt.id) + if err != nil { + t.Errorf("PerformerStore.GetCustomFields() error = %v", err) + return + } + if tt.expected == nil { + assert.Equal(tt.partial.CustomFields.Full, cf) + } 
else { + assert.Equal(tt.expected, cf) + } + }) + } +} + func TestPerformerFindBySceneID(t *testing.T) { withTxn(func(ctx context.Context) error { pqb := db.Performer @@ -1042,6 +1178,242 @@ func TestPerformerQuery(t *testing.T) { } } +func TestPerformerQueryCustomFields(t *testing.T) { + tests := []struct { + name string + filter *models.PerformerFilterType + includeIdxs []int + excludeIdxs []int + wantErr bool + }{ + { + "equals", + &models.PerformerFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierEquals, + Value: []any{getPerformerStringValue(performerIdxWithGallery, "custom")}, + }, + }, + }, + []int{performerIdxWithGallery}, + nil, + false, + }, + { + "not equals", + &models.PerformerFilterType{ + Name: &models.StringCriterionInput{ + Value: getPerformerStringValue(performerIdxWithGallery, "Name"), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierNotEquals, + Value: []any{getPerformerStringValue(performerIdxWithGallery, "custom")}, + }, + }, + }, + nil, + []int{performerIdxWithGallery}, + false, + }, + { + "includes", + &models.PerformerFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierIncludes, + Value: []any{getPerformerStringValue(performerIdxWithGallery, "custom")[9:]}, + }, + }, + }, + []int{performerIdxWithGallery}, + nil, + false, + }, + { + "excludes", + &models.PerformerFilterType{ + Name: &models.StringCriterionInput{ + Value: getPerformerStringValue(performerIdxWithGallery, "Name"), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierExcludes, + Value: []any{getPerformerStringValue(performerIdxWithGallery, "custom")[9:]}, + }, + }, + }, + nil, + []int{performerIdxWithGallery}, + false, + }, 
+ { + "regex", + &models.PerformerFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierMatchesRegex, + Value: []any{".*13_custom"}, + }, + }, + }, + []int{performerIdxWithGallery}, + nil, + false, + }, + { + "invalid regex", + &models.PerformerFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierMatchesRegex, + Value: []any{"["}, + }, + }, + }, + nil, + nil, + true, + }, + { + "not matches regex", + &models.PerformerFilterType{ + Name: &models.StringCriterionInput{ + Value: getPerformerStringValue(performerIdxWithGallery, "Name"), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierNotMatchesRegex, + Value: []any{".*13_custom"}, + }, + }, + }, + nil, + []int{performerIdxWithGallery}, + false, + }, + { + "invalid not matches regex", + &models.PerformerFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierNotMatchesRegex, + Value: []any{"["}, + }, + }, + }, + nil, + nil, + true, + }, + { + "null", + &models.PerformerFilterType{ + Name: &models.StringCriterionInput{ + Value: getPerformerStringValue(performerIdxWithGallery, "Name"), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "not existing", + Modifier: models.CriterionModifierIsNull, + }, + }, + }, + []int{performerIdxWithGallery}, + nil, + false, + }, + { + "null", + &models.PerformerFilterType{ + Name: &models.StringCriterionInput{ + Value: getPerformerStringValue(performerIdxWithGallery, "Name"), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "string", + Modifier: models.CriterionModifierNotNull, + }, + }, + }, + []int{performerIdxWithGallery}, + nil, + false, + }, + { 
+ "between", + &models.PerformerFilterType{ + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "real", + Modifier: models.CriterionModifierBetween, + Value: []any{0.05, 0.15}, + }, + }, + }, + []int{performerIdx1WithScene}, + nil, + false, + }, + { + "not between", + &models.PerformerFilterType{ + Name: &models.StringCriterionInput{ + Value: getPerformerStringValue(performerIdx1WithScene, "Name"), + Modifier: models.CriterionModifierEquals, + }, + CustomFields: []models.CustomFieldCriterionInput{ + { + Field: "real", + Modifier: models.CriterionModifierNotBetween, + Value: []any{0.05, 0.15}, + }, + }, + }, + nil, + []int{performerIdx1WithScene}, + false, + }, + } + + for _, tt := range tests { + runWithRollbackTxn(t, tt.name, func(t *testing.T, ctx context.Context) { + assert := assert.New(t) + + performers, _, err := db.Performer.Query(ctx, tt.filter, nil) + if (err != nil) != tt.wantErr { + t.Errorf("PerformerStore.Query() error = %v, wantErr %v", err, tt.wantErr) + return + } + + ids := performersToIDs(performers) + include := indexesToIDs(performerIDs, tt.includeIdxs) + exclude := indexesToIDs(performerIDs, tt.excludeIdxs) + + for _, i := range include { + assert.Contains(ids, i) + } + for _, e := range exclude { + assert.NotContains(ids, e) + } + }) + } +} + func TestPerformerQueryPenisLength(t *testing.T) { var upper = 4.0 @@ -1172,7 +1544,7 @@ func TestPerformerUpdatePerformerImage(t *testing.T) { performer := models.Performer{ Name: name, } - err := qb.Create(ctx, &performer) + err := qb.Create(ctx, &models.CreatePerformerInput{Performer: &performer}) if err != nil { return fmt.Errorf("Error creating performer: %s", err.Error()) } @@ -1680,7 +2052,7 @@ func TestPerformerStashIDs(t *testing.T) { performer := &models.Performer{ Name: name, } - if err := qb.Create(ctx, performer); err != nil { + if err := qb.Create(ctx, &models.CreatePerformerInput{Performer: performer}); err != nil { return fmt.Errorf("Error creating performer: %s", err.Error()) 
} diff --git a/pkg/sqlite/query.go b/pkg/sqlite/query.go index 9c09d8beaed..4f4c0c8db56 100644 --- a/pkg/sqlite/query.go +++ b/pkg/sqlite/query.go @@ -133,6 +133,9 @@ func (qb *queryBuilder) join(table, as, onClause string) { func (qb *queryBuilder) addJoins(joins ...join) { qb.joins.add(joins...) + for _, j := range joins { + qb.args = append(qb.args, j.args...) + } } func (qb *queryBuilder) addFilter(f *filterBuilder) error { @@ -151,6 +154,9 @@ func (qb *queryBuilder) addFilter(f *filterBuilder) error { qb.args = append(args, qb.args...) } + // add joins here to insert args + qb.addJoins(f.getAllJoins()...) + clause, args = f.generateWhereClauses() if len(clause) > 0 { qb.addWhere(clause) @@ -169,8 +175,6 @@ func (qb *queryBuilder) addFilter(f *filterBuilder) error { qb.addArg(args...) } - qb.addJoins(f.getAllJoins()...) - return nil } diff --git a/pkg/sqlite/repository.go b/pkg/sqlite/repository.go index 2035b11c2fc..ac2954cfb24 100644 --- a/pkg/sqlite/repository.go +++ b/pkg/sqlite/repository.go @@ -222,8 +222,8 @@ func (r *repository) innerJoin(j joiner, as string, parentIDCol string) { } type joiner interface { - addLeftJoin(table, as, onClause string) - addInnerJoin(table, as, onClause string) + addLeftJoin(table, as, onClause string, args ...interface{}) + addInnerJoin(table, as, onClause string, args ...interface{}) } type joinRepository struct { diff --git a/pkg/sqlite/setup_test.go b/pkg/sqlite/setup_test.go index b63b6a04a2c..1d2854297ee 100644 --- a/pkg/sqlite/setup_test.go +++ b/pkg/sqlite/setup_test.go @@ -1508,6 +1508,18 @@ func performerAliases(i int) []string { return []string{getPerformerStringValue(i, "alias")} } +func getPerformerCustomFields(index int) map[string]interface{} { + if index%5 == 0 { + return nil + } + + return map[string]interface{}{ + "string": getPerformerStringValue(index, "custom"), + "int": int64(index % 5), + "real": float64(index) / 10, + } +} + // createPerformers creates n performers with plain Name and o performers 
with camel cased NaMe included func createPerformers(ctx context.Context, n int, o int) error { pqb := db.Performer @@ -1558,7 +1570,10 @@ func createPerformers(ctx context.Context, n int, o int) error { }) } - err := pqb.Create(ctx, &performer) + err := pqb.Create(ctx, &models.CreatePerformerInput{ + Performer: &performer, + CustomFields: getPerformerCustomFields(i), + }) if err != nil { return fmt.Errorf("Error creating performer %v+: %s", performer, err.Error()) diff --git a/pkg/sqlite/tables.go b/pkg/sqlite/tables.go index 481c4ee06a4..c6ab6a4d4d2 100644 --- a/pkg/sqlite/tables.go +++ b/pkg/sqlite/tables.go @@ -32,6 +32,7 @@ var ( performersURLsJoinTable = goqu.T(performerURLsTable) performersTagsJoinTable = goqu.T(performersTagsTable) performersStashIDsJoinTable = goqu.T("performer_stash_ids") + performersCustomFieldsTable = goqu.T("performer_custom_fields") studiosAliasesJoinTable = goqu.T(studioAliasesTable) studiosTagsJoinTable = goqu.T(studiosTagsTable) diff --git a/pkg/utils/json.go b/pkg/utils/json.go deleted file mode 100644 index ae69180688c..00000000000 --- a/pkg/utils/json.go +++ /dev/null @@ -1,16 +0,0 @@ -package utils - -import ( - "encoding/json" - "strings" -) - -// JSONNumberToNumber converts a JSON number to either a float64 or int64. -func JSONNumberToNumber(n json.Number) interface{} { - if strings.Contains(string(n), ".") { - f, _ := n.Float64() - return f - } - ret, _ := n.Int64() - return ret -} diff --git a/pkg/utils/map.go b/pkg/utils/map.go index dbef17646b2..0c555857443 100644 --- a/pkg/utils/map.go +++ b/pkg/utils/map.go @@ -1,7 +1,6 @@ package utils import ( - "encoding/json" "strings" ) @@ -80,19 +79,3 @@ func MergeMaps(dest map[string]interface{}, src map[string]interface{}) { dest[k] = v } } - -// ConvertMapJSONNumbers converts all JSON numbers in a map to either float64 or int64. 
-func ConvertMapJSONNumbers(m map[string]interface{}) (ret map[string]interface{}) { - ret = make(map[string]interface{}) - for k, v := range m { - if n, ok := v.(json.Number); ok { - ret[k] = JSONNumberToNumber(n) - } else if mm, ok := v.(map[string]interface{}); ok { - ret[k] = ConvertMapJSONNumbers(mm) - } else { - ret[k] = v - } - } - - return ret -} diff --git a/pkg/utils/map_test.go b/pkg/utils/map_test.go index 142cd639321..54dfacedd30 100644 --- a/pkg/utils/map_test.go +++ b/pkg/utils/map_test.go @@ -1,11 +1,8 @@ package utils import ( - "encoding/json" "reflect" "testing" - - "github.com/stretchr/testify/assert" ) func TestNestedMapGet(t *testing.T) { @@ -282,55 +279,3 @@ func TestMergeMaps(t *testing.T) { }) } } - -func TestConvertMapJSONNumbers(t *testing.T) { - tests := []struct { - name string - input map[string]interface{} - expected map[string]interface{} - }{ - { - name: "Convert JSON numbers to numbers", - input: map[string]interface{}{ - "int": json.Number("12"), - "float": json.Number("12.34"), - "string": "foo", - }, - expected: map[string]interface{}{ - "int": int64(12), - "float": 12.34, - "string": "foo", - }, - }, - { - name: "Convert JSON numbers to numbers in nested maps", - input: map[string]interface{}{ - "foo": map[string]interface{}{ - "int": json.Number("56"), - "float": json.Number("56.78"), - "nested-string": "bar", - }, - "int": json.Number("12"), - "float": json.Number("12.34"), - "string": "foo", - }, - expected: map[string]interface{}{ - "foo": map[string]interface{}{ - "int": int64(56), - "float": 56.78, - "nested-string": "bar", - }, - "int": int64(12), - "float": 12.34, - "string": "foo", - }, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - result := ConvertMapJSONNumbers(tt.input) - assert.Equal(t, tt.expected, result) - }) - } -} diff --git a/ui/v2.5/graphql/data/performer.graphql b/ui/v2.5/graphql/data/performer.graphql index 0aa60ce21bb..035c8abc72d 100644 --- 
a/ui/v2.5/graphql/data/performer.graphql +++ b/ui/v2.5/graphql/data/performer.graphql @@ -41,4 +41,6 @@ fragment PerformerData on Performer { death_date hair_color weight + + custom_fields } diff --git a/ui/v2.5/src/components/Performers/PerformerDetails/PerformerDetailsPanel.tsx b/ui/v2.5/src/components/Performers/PerformerDetails/PerformerDetailsPanel.tsx index e805c03e621..83fc64c956c 100644 --- a/ui/v2.5/src/components/Performers/PerformerDetails/PerformerDetailsPanel.tsx +++ b/ui/v2.5/src/components/Performers/PerformerDetails/PerformerDetailsPanel.tsx @@ -14,6 +14,7 @@ import { FormatWeight, } from "../PerformerList"; import { PatchComponent } from "src/patch"; +import { CustomFields } from "src/components/Shared/CustomFields"; interface IPerformerDetails { performer: GQL.PerformerDataFragment; @@ -176,6 +177,7 @@ export const PerformerDetailsPanel: React.FC = value={renderStashIDs()} fullWidth={fullWidth} /> + {fullWidth && } ); }); diff --git a/ui/v2.5/src/components/Performers/PerformerDetails/PerformerEditPanel.tsx b/ui/v2.5/src/components/Performers/PerformerDetails/PerformerEditPanel.tsx index 2adcb601e1e..df5b62b05f3 100644 --- a/ui/v2.5/src/components/Performers/PerformerDetails/PerformerEditPanel.tsx +++ b/ui/v2.5/src/components/Performers/PerformerDetails/PerformerEditPanel.tsx @@ -47,6 +47,8 @@ import { yupUniqueStringList, } from "src/utils/yup"; import { useTagsEdit } from "src/hooks/tagsEdit"; +import { CustomFieldsInput } from "src/components/Shared/CustomFields"; +import { cloneDeep } from "@apollo/client/utilities"; const isScraper = ( scraper: GQL.Scraper | GQL.StashBox @@ -61,6 +63,16 @@ interface IPerformerDetails { setEncodingImage: (loading: boolean) => void; } +function customFieldInput(isNew: boolean, input: {}) { + if (isNew) { + return input; + } else { + return { + full: input, + }; + } +} + export const PerformerEditPanel: React.FC = ({ performer, isVisible, @@ -115,6 +127,7 @@ export const PerformerEditPanel: React.FC = ({ 
ignore_auto_tag: yup.boolean().defined(), stash_ids: yup.mixed().defined(), image: yup.string().nullable().optional(), + custom_fields: yup.object().required().defined(), }); const initialValues = { @@ -142,15 +155,26 @@ export const PerformerEditPanel: React.FC = ({ tag_ids: (performer.tags ?? []).map((t) => t.id), ignore_auto_tag: performer.ignore_auto_tag ?? false, stash_ids: getStashIDs(performer.stash_ids), + custom_fields: cloneDeep(performer.custom_fields ?? {}), }; type InputValues = yup.InferType; + const [customFieldsError, setCustomFieldsError] = useState(); + + function submit(values: InputValues) { + const input = { + ...schema.cast(values), + custom_fields: customFieldInput(isNew, values.custom_fields), + }; + onSave(input); + } + const formik = useFormik({ initialValues, enableReinitialize: true, validate: yupFormikValidate(schema), - onSubmit: (values) => onSave(schema.cast(values)), + onSubmit: submit, }); const { tags, updateTagsStateFromScraper, tagsControl } = useTagsEdit( @@ -571,7 +595,11 @@ export const PerformerEditPanel: React.FC = ({
@@ -44,7 +45,7 @@ export const ExpandCollapseButton: React.FC<{ className="minimal expand-collapse" onClick={() => setCollapsed(!collapsed)} > - + ); diff --git a/ui/v2.5/src/components/Shared/CustomFields.tsx b/ui/v2.5/src/components/Shared/CustomFields.tsx new file mode 100644 index 00000000000..233254f7a15 --- /dev/null +++ b/ui/v2.5/src/components/Shared/CustomFields.tsx @@ -0,0 +1,308 @@ +import React, { useEffect, useMemo, useRef, useState } from "react"; +import { CollapseButton } from "./CollapseButton"; +import { DetailItem } from "./DetailItem"; +import { Button, Col, Form, FormGroup, InputGroup, Row } from "react-bootstrap"; +import { FormattedMessage, useIntl } from "react-intl"; +import { cloneDeep } from "@apollo/client/utilities"; +import { Icon } from "./Icon"; +import { faMinus, faPlus } from "@fortawesome/free-solid-svg-icons"; +import cx from "classnames"; + +const maxFieldNameLength = 64; + +export type CustomFieldMap = { + [key: string]: unknown; +}; + +interface ICustomFields { + values: CustomFieldMap; +} + +function convertValue(value: unknown): string { + if (typeof value === "string") { + return value; + } else if (typeof value === "number") { + return value.toString(); + } else if (typeof value === "boolean") { + return value ? "true" : "false"; + } else if (Array.isArray(value)) { + return value.join(", "); + } else { + return JSON.stringify(value); + } +} + +const CustomField: React.FC<{ field: string; value: unknown }> = ({ + field, + value, +}) => { + const valueStr = convertValue(value); + + // replace spaces with hyphen characters for css id + const id = field.toLowerCase().replace(/ /g, "-"); + + return ( + + ); +}; + +export const CustomFields: React.FC = ({ values }) => { + const intl = useIntl(); + if (Object.keys(values).length === 0) { + return null; + } + + return ( + // according to linter rule CSS classes shouldn't use underscores +
+ + {Object.entries(values).map(([key, value]) => ( + + ))} + +
+ ); +}; + +function isNumeric(v: string) { + return /^-?(?:0|(?:[1-9][0-9]*))(?:\.[0-9]+)?$/.test(v); +} + +function convertCustomValue(v: string) { + // if the value is numeric, convert it to a number + if (isNumeric(v)) { + return Number(v); + } else { + return v; + } +} + +const CustomFieldInput: React.FC<{ + field: string; + value: unknown; + onChange: (field: string, value: unknown) => void; + isNew?: boolean; + error?: string; +}> = ({ field, value, onChange, isNew = false, error }) => { + const intl = useIntl(); + const [currentField, setCurrentField] = useState(field); + const [currentValue, setCurrentValue] = useState(value as string); + + const fieldRef = useRef(null); + const valueRef = useRef(null); + + useEffect(() => { + setCurrentField(field); + setCurrentValue(value as string); + }, [field, value]); + + function onBlur() { + onChange(currentField, convertCustomValue(currentValue)); + } + + function onDelete() { + onChange("", ""); + } + + return ( + + + + {isNew ? ( + <> + setCurrentField(event.currentTarget.value)} + onBlur={onBlur} + /> + + ) : ( + {currentField} + )} + + + + setCurrentValue(event.currentTarget.value)} + onBlur={onBlur} + /> + + {!isNew && ( + + )} + + + + + {error} + + ); +}; + +interface ICustomField { + field: string; + value: unknown; +} + +interface ICustomFieldsInput { + values: CustomFieldMap; + error?: string; + onChange: (values: CustomFieldMap) => void; + setError: (error?: string) => void; +} + +export const CustomFieldsInput: React.FC = ({ + values, + error, + onChange, + setError, +}) => { + const intl = useIntl(); + + const [newCustomField, setNewCustomField] = useState({ + field: "", + value: "", + }); + + const fields = useMemo(() => { + const valueCopy = cloneDeep(values); + if (newCustomField.field !== "" && error === undefined) { + delete valueCopy[newCustomField.field]; + } + + const ret = Object.keys(valueCopy); + ret.sort(); + return ret; + }, [values, newCustomField, error]); + + function onSetNewField(v: 
ICustomField) { + // validate the field name + let newError = undefined; + if (v.field.length > maxFieldNameLength) { + newError = intl.formatMessage({ + id: "errors.custom_fields.field_name_length", + }); + } + if (v.field.trim() === "" && v.value !== "") { + newError = intl.formatMessage({ + id: "errors.custom_fields.field_name_required", + }); + } + if (v.field.trim() !== v.field) { + newError = intl.formatMessage({ + id: "errors.custom_fields.field_name_whitespace", + }); + } + if (fields.includes(v.field)) { + newError = intl.formatMessage({ + id: "errors.custom_fields.duplicate_field", + }); + } + + const oldField = newCustomField; + + setNewCustomField(v); + + const valuesCopy = cloneDeep(values); + if (oldField.field !== "" && error === undefined) { + delete valuesCopy[oldField.field]; + } + + // if valid, pass up + if (!newError && v.field !== "") { + valuesCopy[v.field] = v.value; + } + + onChange(valuesCopy); + setError(newError); + } + + function onAdd() { + const newValues = { + ...values, + [newCustomField.field]: newCustomField.value, + }; + setNewCustomField({ field: "", value: "" }); + onChange(newValues); + } + + function fieldChanged( + currentField: string, + newField: string, + value: unknown + ) { + let newValues = cloneDeep(values); + delete newValues[currentField]; + if (newField !== "") { + newValues[newField] = value; + } + onChange(newValues); + } + + return ( + + + + + + + + + + + + {fields.map((field) => ( + + fieldChanged(field, newField, newValue) + } + /> + ))} + onSetNewField({ field, value })} + isNew + /> + + + + + ); +}; diff --git a/ui/v2.5/src/components/Shared/DetailItem.tsx b/ui/v2.5/src/components/Shared/DetailItem.tsx index 304655a4c69..a92f75868d3 100644 --- a/ui/v2.5/src/components/Shared/DetailItem.tsx +++ b/ui/v2.5/src/components/Shared/DetailItem.tsx @@ -3,34 +3,39 @@ import { FormattedMessage } from "react-intl"; interface IDetailItem { id?: string | null; + label?: React.ReactNode; value?: React.ReactNode; + 
labelTitle?: string; title?: string; fullWidth?: boolean; + showEmpty?: boolean; } export const DetailItem: React.FC = ({ id, + label, value, + labelTitle, title, fullWidth, + showEmpty = false, }) => { - if (!id || !value || value === "Na") { + if (!id || (!showEmpty && (!value || value === "Na"))) { return <>; } - const message = ; + const message = label ?? ; + + // according to linter rule CSS classes shouldn't use underscores + const sanitisedID = id.replace(/_/g, "-"); return ( - // according to linter rule CSS classes shouldn't use underscores
- + {message} {fullWidth ? ":" : ""} - + {value}
diff --git a/ui/v2.5/src/components/Shared/styles.scss b/ui/v2.5/src/components/Shared/styles.scss index 644eff047cd..50777fff380 100644 --- a/ui/v2.5/src/components/Shared/styles.scss +++ b/ui/v2.5/src/components/Shared/styles.scss @@ -197,6 +197,15 @@ button.collapse-button.btn-primary:not(:disabled):not(.disabled):active { border: none; box-shadow: none; color: #f5f8fa; + text-align: left; +} + +button.collapse-button { + .fa-icon { + margin-left: 0; + } + + padding-left: 0; } .hover-popover-content { @@ -678,3 +687,44 @@ button.btn.favorite-button { } } } + +.custom-fields .detail-item .detail-item-title { + max-width: 130px; + overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap; +} + +.custom-fields-input > .collapse-button { + font-weight: 700; +} + +.custom-fields-row { + align-items: center; + font-family: "Courier New", Courier, monospace; + font-size: 0.875rem; + + .form-label { + margin-bottom: 0; + max-width: 100%; + overflow: hidden; + text-overflow: ellipsis; + vertical-align: middle; + white-space: nowrap; + } + + // labels with titles are styled with help cursor and dotted underline elsewhere + div.custom-fields-field label.form-label { + cursor: inherit; + text-decoration: inherit; + } + + .form-control, + .btn { + font-size: 0.875rem; + } + + &.custom-fields-new > div:not(:last-child) { + padding-right: 0; + } +} diff --git a/ui/v2.5/src/locales/en-GB.json b/ui/v2.5/src/locales/en-GB.json index f9e5c3c4902..f22ba1de574 100644 --- a/ui/v2.5/src/locales/en-GB.json +++ b/ui/v2.5/src/locales/en-GB.json @@ -854,6 +854,11 @@ "only": "Only" }, "custom": "Custom", + "custom_fields": { + "field": "Field", + "title": "Custom Fields", + "value": "Value" + }, "date": "Date", "date_format": "YYYY-MM-DD", "datetime_format": "YYYY-MM-DD HH:MM", @@ -1035,6 +1040,12 @@ }, "empty_server": "Add some scenes to your server to view recommendations on this page.", "errors": { + "custom_fields": { + "duplicate_field": "Field name must be unique", + 
"field_name_length": "Field name must be fewer than 65 characters",
+      "field_name_required": "Field name is required",
+      "field_name_whitespace": "Field name cannot have leading or trailing whitespace"
+    },
     "header": "Error",
     "image_index_greater_than_zero": "Image index must be greater than 0",
     "invalid_javascript_string": "Invalid javascript code: {error}",