Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Clean up deprecated arguments #380

Closed
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 0 additions & 7 deletions .snapshots/TestHelp
Original file line number Diff line number Diff line change
Expand Up @@ -18,16 +18,9 @@ Application Options:
regular expressions
--follow-robots-txt Follow robots.txt when scraping
pages
--follow-sitemap-xml Scrape only pages listed in
sitemap.xml (deprecated)
--header=<header>... Custom headers
-f, --ignore-fragments Ignore URL fragments
--format=[text|json|junit] Output format (default: text)
--json Output results in JSON (deprecated)
--experimental-verbose-json Include successful results in JSON
(deprecated)
--junit Output results as JUnit XML file
(deprecated)
-r, --max-redirections=<count> Maximum number of redirections
(default: 64)
--rate-limit=<rate> Max requests per second
Expand Down
22 changes: 2 additions & 20 deletions arguments.go
Original file line number Diff line number Diff line change
Expand Up @@ -19,17 +19,10 @@ type arguments struct {
RawExcludedPatterns []string `short:"e" long:"exclude" value-name:"<pattern>..." description:"Exclude URLs matched with given regular expressions"`
RawIncludedPatterns []string `short:"i" long:"include" value-name:"<pattern>..." description:"Include URLs matched with given regular expressions"`
FollowRobotsTxt bool `long:"follow-robots-txt" description:"Follow robots.txt when scraping pages"`
FollowSitemapXML bool `long:"follow-sitemap-xml" description:"Scrape only pages listed in sitemap.xml (deprecated)"`
RawHeaders []string `long:"header" value-name:"<header>..." description:"Custom headers"`
// TODO Remove a short option.
IgnoreFragments bool `short:"f" long:"ignore-fragments" description:"Ignore URL fragments"`
Format string `long:"format" description:"Output format" default:"text" choice:"text" choice:"json" choice:"junit"`
// TODO Remove this option.
JSONOutput bool `long:"json" description:"Output results in JSON (deprecated)"`
// TODO Remove this option.
VerboseJSON bool `long:"experimental-verbose-json" description:"Include successful results in JSON (deprecated)"`
// TODO Remove this option.
JUnitOutput bool `long:"junit" description:"Output results as JUnit XML file (deprecated)"`
IgnoreFragments bool `short:"f" long:"ignore-fragments" description:"Ignore URL fragments"`
Format string `long:"format" description:"Output format" default:"text" choice:"text" choice:"json" choice:"junit"`
MaxRedirections int `short:"r" long:"max-redirections" value-name:"<count>" default:"64" description:"Maximum number of redirections"`
RateLimit int `long:"rate-limit" value-name:"<rate>" description:"Max requests per second"`
Timeout int `short:"t" long:"timeout" value-name:"<seconds>" default:"10" description:"Timeout for HTTP requests in seconds"`
Expand Down Expand Up @@ -59,8 +52,6 @@ func getArguments(ss []string) (*arguments, error) {
return nil, errors.New("invalid number of arguments")
}

reconcileDeprecatedArguments(&args)

args.URL = ss[0]

args.ExcludedPatterns, err = compileRegexps(args.RawExcludedPatterns)
Expand Down Expand Up @@ -133,12 +124,3 @@ func parseHeaders(headers []string) (http.Header, error) {

return h, nil
}

// reconcileDeprecatedArguments maps the deprecated --json, --junit, and
// --experimental-verbose-json flags onto their modern replacements
// (--format and --verbose) so that the rest of the program only needs to
// consult the new fields. --json takes precedence over --junit, mirroring
// the historical flag behavior.
func reconcileDeprecatedArguments(args *arguments) {
	switch {
	case args.JSONOutput:
		args.Format = "json"
		// The old verbose-JSON flag only had an effect together with --json.
		if args.VerboseJSON {
			args.Verbose = true
		}
	case args.JUnitOutput:
		args.Format = "junit"
	}
}
3 changes: 1 addition & 2 deletions arguments_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,6 @@ func TestGetArguments(t *testing.T) {
{"-r", "4", "https://foo.com"},
{"--max-redirections", "4", "https://foo.com"},
{"--follow-robots-txt", "https://foo.com"},
{"--follow-sitemap-xml", "https://foo.com"},
{"-t", "10", "https://foo.com"},
{"--timeout", "10", "https://foo.com"},
{"--rate-limit", "1", "https://foo.com"},
Expand All @@ -36,7 +35,7 @@ func TestGetArguments(t *testing.T) {
{"-v", "-f", "https://foo.com"},
{"-v", "--ignore-fragments", "https://foo.com"},
{"--one-page-only", "https://foo.com"},
{"--json", "https://foo.com"},
{"--format=json", "https://foo.com"},
{"-h"},
{"--help"},
{"--version"},
Expand Down
8 changes: 0 additions & 8 deletions command.go
Original file line number Diff line number Diff line change
Expand Up @@ -99,14 +99,6 @@ func (c *command) runWithError(ss []string) (bool, error) {

sm := (map[string]struct{})(nil)

if args.FollowSitemapXML {
sm, err = newSitemapFetcher(client).Fetch(p.URL())

if err != nil {
return false, err
}
}

checker := newPageChecker(
f,
newLinkValidator(p.URL().Hostname(), rd, sm),
Expand Down
22 changes: 4 additions & 18 deletions command_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -236,7 +236,7 @@ func TestCommandFailToRunWithJSONOutput(t *testing.T) {

return nil, errors.New("foo")
},
).Run([]string{"--json", "http://foo.com"})
).Run([]string{"--format=json", "http://foo.com"})

assert.False(t, ok)
assert.Greater(t, b.Len(), 0)
Expand All @@ -250,26 +250,12 @@ func TestCommandDoNotIncludeSuccessfulPageInJSONOutput(t *testing.T) {
func(u *url.URL) (*fakeHttpResponse, error) {
return newFakeHtmlResponse("", ""), nil
},
).Run([]string{"--json", "http://foo.com"})
).Run([]string{"--format=json", "http://foo.com"})

assert.True(t, ok)
assert.Equal(t, strings.TrimSpace(b.String()), "[]")
}

func TestCommandIncludeSuccessfulPageInJSONOutputWhenRequested(t *testing.T) {
b := &bytes.Buffer{}

ok := newTestCommandWithStdout(
b,
func(u *url.URL) (*fakeHttpResponse, error) {
return newFakeHtmlResponse("", ""), nil
},
).Run([]string{"--json", "--experimental-verbose-json", "http://foo.com"})

assert.True(t, ok)
assert.Equal(t, strings.TrimSpace(b.String()), "[{\"url\":\"\",\"links\":[]}]")
}

func TestCommandFailToRunWithJUnitOutput(t *testing.T) {
b := &bytes.Buffer{}

Expand All @@ -285,7 +271,7 @@ func TestCommandFailToRunWithJUnitOutput(t *testing.T) {

return nil, errors.New("foo")
},
).Run([]string{"--junit", "http://foo.com"})
).Run([]string{"--format=junit", "http://foo.com"})

assert.False(t, ok)
assert.Greater(t, b.Len(), 0)
Expand All @@ -295,7 +281,7 @@ func TestCommandFailWithVerboseAndJUnitOptions(t *testing.T) {
b := &bytes.Buffer{}

ok := newTestCommandWithStderr(b, nil).Run(
[]string{"--junit", "--verbose", "http://foo.com"},
[]string{"--format=junit", "--verbose", "http://foo.com"},
)

assert.False(t, ok)
Expand Down
Loading