Limit scraping (xbapps#1592)
* Ability to limit scraping to 1 page

* Set Site Option Limit Scraping
toshski authored Jan 17, 2024
1 parent 706eee3 commit 10e64a0
Showing 42 changed files with 258 additions and 172 deletions.
10 changes: 10 additions & 0 deletions pkg/api/options.go
@@ -208,6 +208,9 @@ func (i ConfigResource) WebService() *restful.WebService {
ws.Route(ws.PUT("/sites/subscribed/{site}").To(i.toggleSubscribed).
Metadata(restfulspec.KeyOpenAPITags, tags))

ws.Route(ws.PUT("/sites/limit_scraping/{site}").To(i.toggleLimitScraping).
Metadata(restfulspec.KeyOpenAPITags, tags))

ws.Route(ws.POST("/scraper/force-site-update").To(i.forceSiteUpdate).
Metadata(restfulspec.KeyOpenAPITags, tags))

@@ -313,6 +316,10 @@ func (i ConfigResource) toggleSubscribed(req *restful.Request, resp *restful.Res
i.toggleSiteField(req, resp, "Subscribed")
}

func (i ConfigResource) toggleLimitScraping(req *restful.Request, resp *restful.Response) {
i.toggleSiteField(req, resp, "LimitScraping")
}

func (i ConfigResource) toggleSiteField(req *restful.Request, resp *restful.Response, field string) {
db, _ := models.GetDB()
defer db.Close()
@@ -335,6 +342,9 @@ func (i ConfigResource) toggleSiteField(req *restful.Request, resp *restful.Resp
site.Subscribed = !site.Subscribed
log.Infof("Toggling %s %v", id, site.Subscribed)
db.Model(&models.Scene{}).Where("scraper_id = ?", site.ID).Update("is_subscribed", site.Subscribed)
case "LimitScraping":
site.LimitScraping = !site.LimitScraping
db.Model(&models.Scene{}).Where("scraper_id = ?", site.ID).Update("limit_scraping", site.LimitScraping)
}
site.Save()

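For reference, a minimal sketch of flipping the new flag from a Go client. The base URL (the /api/options mount point and the port) and the site ID are assumptions, not part of this commit; the route itself is the PUT /sites/limit_scraping/{site} added above, and each call toggles the stored value, mirroring the existing subscribed toggle.

package main

import (
	"fmt"
	"net/http"
)

func main() {
	// Sketch only: base URL and site ID are assumptions.
	// Each PUT toggles Site.LimitScraping for the named site.
	url := "http://localhost:9999/api/options/sites/limit_scraping/badoinkvr"
	req, err := http.NewRequest(http.MethodPut, url, nil)
	if err != nil {
		panic(err)
	}
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status)
}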
9 changes: 9 additions & 0 deletions pkg/migrations/migrations.go
@@ -767,6 +767,15 @@ func Migrate() {
return tx.AutoMigrate(&Scene{}).Error
},
},
{
ID: "0074-Limit-Scraper",
Migrate: func(tx *gorm.DB) error {
type Site struct {
LimitScraping bool `json:"limit_scraping" xbvrbackup:"limit_scraping"`
}
return tx.AutoMigrate(Site{}).Error
},
},

// ===============================================================================================
// Put DB Schema migrations above this line and migrations that rely on the updated schema below
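The migration follows the project's usual additive pattern: AutoMigrate runs against a throwaway struct carrying only the new column, so gorm adds it to the existing sites table and leaves everything else alone. As a sketch, the effective DDL is roughly the following (the exact statement depends on the SQL dialect; this is an assumption, not output from the commit):

// ALTER TABLE sites ADD COLUMN limit_scraping bool;
// Existing rows get Go's zero value (false), so every site keeps
// scraping without a page limit until the option is switched on.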
2 changes: 1 addition & 1 deletion pkg/models/model_scraper.go
@@ -7,7 +7,7 @@ import (

var scrapers []Scraper

type ScraperFunc func(*sync.WaitGroup, bool, []string, chan<- ScrapedScene, string, string) error
type ScraperFunc func(*sync.WaitGroup, bool, []string, chan<- ScrapedScene, string, string, bool) error

type Scraper struct {
ID string `json:"id"`
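Every registered scraper now takes the flag as a trailing bool, which is why the change fans out across all the scraper files below. A sketch of a conforming scraper under the widened signature (ExampleSite is illustrative and not part of the commit; the parameter list, including the misspelled singeScrapeAdditionalInfo name, matches the real signature):

package scrape

import (
	"sync"

	"github.com/xbapps/xbvr/pkg/models"
)

// Sketch only: any function with this shape satisfies models.ScraperFunc.
func ExampleSite(wg *sync.WaitGroup, updateSite bool, knownScenes []string,
	out chan<- models.ScrapedScene, singleSceneURL string,
	singeScrapeAdditionalInfo string, limitScraping bool) error {
	defer wg.Done()
	if !limitScraping {
		// follow pagination links here; when the flag is set,
		// only the first listing page is scraped
	}
	return nil
}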
17 changes: 9 additions & 8 deletions pkg/models/model_site.go
@@ -8,14 +8,15 @@ import (
)

type Site struct {
ID string `gorm:"primary_key" json:"id" xbvrbackup:"-"`
Name string `json:"name" xbvrbackup:"name"`
AvatarURL string `json:"avatar_url" xbvrbackup:"-"`
IsBuiltin bool `json:"is_builtin" xbvrbackup:"-"`
IsEnabled bool `json:"is_enabled" xbvrbackup:"is_enabled"`
LastUpdate time.Time `json:"last_update" xbvrbackup:"-"`
Subscribed bool `json:"subscribed" xbvrbackup:"subscribed"`
HasScraper bool `gorm:"-" json:"has_scraper" xbvrbackup:"-"`
ID string `gorm:"primary_key" json:"id" xbvrbackup:"-"`
Name string `json:"name" xbvrbackup:"name"`
AvatarURL string `json:"avatar_url" xbvrbackup:"-"`
IsBuiltin bool `json:"is_builtin" xbvrbackup:"-"`
IsEnabled bool `json:"is_enabled" xbvrbackup:"is_enabled"`
LastUpdate time.Time `json:"last_update" xbvrbackup:"-"`
Subscribed bool `json:"subscribed" xbvrbackup:"subscribed"`
HasScraper bool `gorm:"-" json:"has_scraper" xbvrbackup:"-"`
LimitScraping bool `json:"limit_scraping" xbvrbackup:"limit_scraping"`
}

func (i *Site) Save() error {
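Because the new field carries both a json and an xbvrbackup tag, it rides along in API responses and backup bundles automatically. A sketch of the serialized shape (values illustrative):

// {"id": "badoinkvr", "name": "BadoinkVR", "is_enabled": true,
//  "subscribed": false, "limit_scraping": true, ...}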
2 changes: 1 addition & 1 deletion pkg/scrape/baberoticavr.go
@@ -16,7 +16,7 @@ import (
"github.com/xbapps/xbvr/pkg/models"
)

func BaberoticaVR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string) error {
func BaberoticaVR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
defer wg.Done()
scraperID := "baberoticavr"
siteID := "BaberoticaVR"
28 changes: 15 additions & 13 deletions pkg/scrape/badoink.go
@@ -23,7 +23,7 @@ import (
"github.com/xbapps/xbvr/pkg/models"
)

func BadoinkSite(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, scraperID string, siteID string, URL string, singeScrapeAdditionalInfo string) error {
func BadoinkSite(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, scraperID string, siteID string, URL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
defer wg.Done()
logScrapeStart(scraperID, siteID)

@@ -187,8 +187,10 @@ func BadoinkSite(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out
})

siteCollector.OnHTML(`div.pagination a`, func(e *colly.HTMLElement) {
pageURL := e.Request.AbsoluteURL(e.Attr("href"))
siteCollector.Visit(pageURL)
if !limitScraping {
pageURL := e.Request.AbsoluteURL(e.Attr("href"))
siteCollector.Visit(pageURL)
}
})

siteCollector.OnHTML(`main[data-page=VideoList] a.video-card-image-container`, func(e *colly.HTMLElement) {
@@ -238,24 +240,24 @@ func BadoinkSite(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out
return nil
}

func BadoinkVR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string) error {
return BadoinkSite(wg, updateSite, knownScenes, out, singleSceneURL, "badoinkvr", "BadoinkVR", "https://badoinkvr.com/vrpornvideos?order=newest", singeScrapeAdditionalInfo)
func BadoinkVR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
return BadoinkSite(wg, updateSite, knownScenes, out, singleSceneURL, "badoinkvr", "BadoinkVR", "https://badoinkvr.com/vrpornvideos?order=newest", singeScrapeAdditionalInfo, limitScraping)
}

func B18VR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string) error {
return BadoinkSite(wg, updateSite, knownScenes, out, singleSceneURL, "18vr", "18VR", "https://18vr.com/vrpornvideos", singeScrapeAdditionalInfo)
func B18VR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
return BadoinkSite(wg, updateSite, knownScenes, out, singleSceneURL, "18vr", "18VR", "https://18vr.com/vrpornvideos?order=newest", singeScrapeAdditionalInfo, limitScraping)
}

func VRCosplayX(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string) error {
return BadoinkSite(wg, updateSite, knownScenes, out, singleSceneURL, "vrcosplayx", "VRCosplayX", "https://vrcosplayx.com/cosplaypornvideos", singeScrapeAdditionalInfo)
func VRCosplayX(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
return BadoinkSite(wg, updateSite, knownScenes, out, singleSceneURL, "vrcosplayx", "VRCosplayX", "https://vrcosplayx.com/cosplaypornvideos?order=newest", singeScrapeAdditionalInfo, limitScraping)
}

func BabeVR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string) error {
return BadoinkSite(wg, updateSite, knownScenes, out, singleSceneURL, "babevr", "BabeVR", "https://babevr.com/vrpornvideos", singeScrapeAdditionalInfo)
func BabeVR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
return BadoinkSite(wg, updateSite, knownScenes, out, singleSceneURL, "babevr", "BabeVR", "https://babevr.com/vrpornvideos?order=newest", singeScrapeAdditionalInfo, limitScraping)
}

func KinkVR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string) error {
return BadoinkSite(wg, updateSite, knownScenes, out, singleSceneURL, "kinkvr", "KinkVR", "https://kinkvr.com/bdsm-vr-videos", singeScrapeAdditionalInfo)
func KinkVR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
return BadoinkSite(wg, updateSite, knownScenes, out, singleSceneURL, "kinkvr", "KinkVR", "https://kinkvr.com/bdsm-vr-videos?order=newest", singeScrapeAdditionalInfo, limitScraping)
}

func init() {
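The enforcement point is colly's pagination callback: when limitScraping is set, the handler never enqueues the next listing page, so the crawl ends after page one while single-scene and first-page scraping behave as before. Note the start URLs also gain explicit newest-first sort parameters (?order=newest here, &sort=date for Czech VR, &sort=released for LethalHardcore below), so the one page that does get scraped is the latest content. The guard in isolation (sketch; wirePagination is a hypothetical helper and the colly v2 import path is an assumption):

package scrape

import (
	"github.com/gocolly/colly/v2"
)

// Sketch only: the pagination guard this commit threads through each scraper.
func wirePagination(siteCollector *colly.Collector, limitScraping bool) {
	siteCollector.OnHTML(`div.pagination a`, func(e *colly.HTMLElement) {
		if !limitScraping {
			pageURL := e.Request.AbsoluteURL(e.Attr("href"))
			siteCollector.Visit(pageURL)
		}
		// with limitScraping set, pagination links are ignored
	})
}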
10 changes: 6 additions & 4 deletions pkg/scrape/caribbeancom.go
@@ -15,7 +15,7 @@ import (
"golang.org/x/text/language"
)

func CariVR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string) error {
func CariVR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
defer wg.Done()
scraperID := "caribbeancomvr"
siteID := "CaribbeanCom VR"
@@ -125,9 +125,11 @@ func CariVR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<
})

siteCollector.OnHTML(`.pagination-large .pagination__item[rel="next"]`, func(e *colly.HTMLElement) {
// replace "all" with "vr" to allow for correct page navigation
pageURL := strings.Replace(e.Request.AbsoluteURL(e.Attr("href")), "all", "vr", 1)
siteCollector.Visit(pageURL)
if !limitScraping {
// replace "all" with "vr" to allow for correct page navigation
pageURL := strings.Replace(e.Request.AbsoluteURL(e.Attr("href")), "all", "vr", 1)
siteCollector.Visit(pageURL)
}
})

if singleSceneURL != "" {
18 changes: 10 additions & 8 deletions pkg/scrape/czechvr.go
@@ -14,7 +14,7 @@ import (
"github.com/xbapps/xbvr/pkg/models"
)

func CzechVR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, scraperID string, siteID string, nwID string, singeScrapeAdditionalInfo string) error {
func CzechVR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, scraperID string, siteID string, nwID string, singeScrapeAdditionalInfo string, limitScraping bool) error {
defer wg.Done()
logScrapeStart(scraperID, siteID)

@@ -149,8 +149,10 @@ func CzechVR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan
})

siteCollector.OnHTML(`div#StrankovaniDesktop span.stred a,div#StrankovaniDesktopHome span.stred a`, func(e *colly.HTMLElement) {
pageURL := e.Request.AbsoluteURL(e.Attr("href") + "&sites=" + nwID)
siteCollector.Visit(pageURL)
if !limitScraping {
pageURL := e.Request.AbsoluteURL(e.Attr("href") + "&sites=" + nwID)
siteCollector.Visit(pageURL)
}
})

siteCollector.OnHTML(`div.postTag`, func(e *colly.HTMLElement) {
@@ -182,7 +184,7 @@ func CzechVR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan
if singleSceneURL != "" {
sceneCollector.Visit(singleSceneURL)
} else {
siteCollector.Visit("https://www.czechvrnetwork.com/vr-porn-videos&sites=" + nwID)
siteCollector.Visit("https://www.czechvrnetwork.com/vr-porn-videos&sort=date&sites=" + nwID)
}

if updateSite {
@@ -194,15 +196,15 @@ func CzechVR(
}

func addCZVRScraper(id string, name string, nwid string, avatarURL string) {
registerScraper(id, name, avatarURL, "czechvrnetwork.com", func(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string) error {
return CzechVR(wg, updateSite, knownScenes, out, singleSceneURL, id, name, nwid, singeScrapeAdditionalInfo)
registerScraper(id, name, avatarURL, "czechvrnetwork.com", func(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
return CzechVR(wg, updateSite, knownScenes, out, singleSceneURL, id, name, nwid, singeScrapeAdditionalInfo, limitScraping)
})
}

func init() {
// scraper for scraping single scenes where only the url is provided
registerScraper("czechvr-single_scene", "Czech VR - Other Studios", "", "czechvrnetwork.com", func(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string) error {
return CzechVR(wg, updateSite, knownScenes, out, singleSceneURL, "", "", "", "")
registerScraper("czechvr-single_scene", "Czech VR - Other Studios", "", "czechvrnetwork.com", func(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
return CzechVR(wg, updateSite, knownScenes, out, singleSceneURL, "", "", "", "", limitScraping)
})
addCZVRScraper("czechvr", "Czech VR", "15", "https://www.czechvr.com/images/favicon/android-chrome-256x256.png")
addCZVRScraper("czechvrfetish", "Czech VR Fetish", "16", "https://www.czechvrfetish.com/images/favicon/android-chrome-256x256.png")
8 changes: 5 additions & 3 deletions pkg/scrape/darkroomvr.go
@@ -13,7 +13,7 @@ import (
"github.com/xbapps/xbvr/pkg/models"
)

func DarkRoomVR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string) error {
func DarkRoomVR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
defer wg.Done()
scraperID := "darkroomvr"
siteID := "DarkRoomVR"
@@ -114,8 +114,10 @@ func DarkRoomVR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out c
})

siteCollector.OnHTML(`div.pagination a`, func(e *colly.HTMLElement) {
pageURL := e.Request.AbsoluteURL(e.Attr("href"))
siteCollector.Visit(pageURL)
if !limitScraping {
pageURL := e.Request.AbsoluteURL(e.Attr("href"))
siteCollector.Visit(pageURL)
}
})

siteCollector.OnHTML(`div.video-card__item a[class=image-container]`, func(e *colly.HTMLElement) {
5 changes: 4 additions & 1 deletion pkg/scrape/fuckpassvr.go
@@ -16,7 +16,7 @@ import (
"github.com/xbapps/xbvr/pkg/models"
)

func FuckPassVR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string) error {
func FuckPassVR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
defer wg.Done()
scraperID := "fuckpassvr-native"
siteID := "FuckPassVR"
@@ -121,6 +121,9 @@ func FuckPassVR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out c

var page int64 = 1
var lastPage int64 = 1
if limitScraping {
lastPage = 1
}

if singleSceneURL != "" {
ctx := colly.NewContext()
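FuckPassVR drives its crawl from a JSON API rather than HTML pagination links, so the limit takes a different form here: the scraper walks page counters, and keeping lastPage pinned at 1 stops the loop after the first API page. The shape of that variant (sketch only; fetchPage and its return value are hypothetical stand-ins for the real API call):

package main

import "fmt"

// Sketch only: hypothetical stand-in for one API-page fetch that reports
// how many pages the site claims to have.
func fetchPage(page int64) (reportedLastPage int64) {
	fmt.Println("fetching page", page)
	return 3 // pretend the API reports three pages in total
}

func main() {
	limitScraping := true
	var page int64 = 1
	var lastPage int64 = 1
	for ; page <= lastPage; page++ {
		reported := fetchPage(page)
		if !limitScraping {
			lastPage = reported // only advance past page one when unlimited
		}
	}
}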
8 changes: 5 additions & 3 deletions pkg/scrape/groobyvr.go
@@ -14,7 +14,7 @@ import (
"github.com/xbapps/xbvr/pkg/models"
)

func GroobyVR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string) error {
func GroobyVR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
defer wg.Done()
scraperID := "groobyvr"
siteID := "GroobyVR"
@@ -115,8 +115,10 @@ func GroobyVR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out cha
})

siteCollector.OnHTML(`div.pagination li a:not(.active)`, func(e *colly.HTMLElement) {
pageURL := e.Request.AbsoluteURL(e.Attr("href"))
siteCollector.Visit(pageURL)
if !limitScraping {
pageURL := e.Request.AbsoluteURL(e.Attr("href"))
siteCollector.Visit(pageURL)
}
})

if singleSceneURL != "" {
2 changes: 1 addition & 1 deletion pkg/scrape/hologirlsvr.go
@@ -11,7 +11,7 @@ import (
"github.com/xbapps/xbvr/pkg/models"
)

func HoloGirlsVR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string) error {
func HoloGirlsVR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
defer wg.Done()
scraperID := "hologirlsvr"
siteID := "HoloGirlsVR"
16 changes: 9 additions & 7 deletions pkg/scrape/lethalhardcorevr.go
@@ -26,7 +26,7 @@ func isGoodTag(lookup string) bool {
return true
}

func LethalHardcoreSite(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, scraperID string, siteID string, URL string, singeScrapeAdditionalInfo string) error {
func LethalHardcoreSite(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, scraperID string, siteID string, URL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
defer wg.Done()
logScrapeStart(scraperID, siteID)

@@ -138,8 +138,10 @@ func LethalHardcoreSite(wg *sync.WaitGroup, updateSite bool, knownScenes []strin
})

siteCollector.OnHTML(`div.pagination a`, func(e *colly.HTMLElement) {
pageURL := e.Request.AbsoluteURL(e.Attr("href"))
siteCollector.Visit(pageURL)
if !limitScraping {
pageURL := e.Request.AbsoluteURL(e.Attr("href"))
siteCollector.Visit(pageURL)
}
})

siteCollector.OnHTML(`div.scene-list-item`, func(e *colly.HTMLElement) {
@@ -174,12 +176,12 @@ func LethalHardcoreSite(wg *sync.WaitGroup, updateSite bool, knownScenes []strin
return nil
}

func LethalHardcoreVR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string) error {
return LethalHardcoreSite(wg, updateSite, knownScenes, out, singleSceneURL, "lethalhardcorevr", "LethalHardcoreVR", "https://lethalhardcorevr.com/lethal-hardcore-vr-scenes.html?studio=95595", singeScrapeAdditionalInfo)
func LethalHardcoreVR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
return LethalHardcoreSite(wg, updateSite, knownScenes, out, singleSceneURL, "lethalhardcorevr", "LethalHardcoreVR", "https://lethalhardcorevr.com/lethal-hardcore-vr-scenes.html?studio=95595&sort=released", singeScrapeAdditionalInfo, limitScraping)
}

func WhorecraftVR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string) error {
return LethalHardcoreSite(wg, updateSite, knownScenes, out, singleSceneURL, "whorecraftvr", "WhorecraftVR", "https://lethalhardcorevr.com/lethal-hardcore-vr-scenes.html?studio=95347", singeScrapeAdditionalInfo)
func WhorecraftVR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
return LethalHardcoreSite(wg, updateSite, knownScenes, out, singleSceneURL, "whorecraftvr", "WhorecraftVR", "https://lethalhardcorevr.com/lethal-hardcore-vr-scenes.html?studio=95347&sort=released", singeScrapeAdditionalInfo, limitScraping)
}

func init() {
2 changes: 1 addition & 1 deletion pkg/scrape/littlecaprice.go
@@ -13,7 +13,7 @@ import (
"github.com/xbapps/xbvr/pkg/models"
)

func LittleCaprice(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string) error {
func LittleCaprice(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
defer wg.Done()
scraperID := "littlecaprice"
siteID := "Little Caprice Dreams"

