use the same io.Writer as the cobra.Commands for writing crawl logs
atomicptr committed Apr 4, 2020
1 parent 2f33c68 commit 9e8eaae
Showing 6 changed files with 9 additions and 5 deletions.
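
The idea behind the change: cobra's OutOrStdout() returns whatever writer was configured with SetOut, falling back to os.Stdout, so routing crawl logs through it makes them capturable in tests instead of hard-wiring fmt.Println. A minimal standalone sketch of that behavior (not part of this commit):

package main

import (
	"bytes"
	"fmt"
	"os"

	"github.com/spf13/cobra"
)

func main() {
	cmd := &cobra.Command{
		Use: "demo",
		Run: func(cmd *cobra.Command, args []string) {
			// Writes to the writer set via SetOut,
			// or to os.Stdout when none was set.
			fmt.Fprintln(cmd.OutOrStdout(), "crawl log line")
		},
	}

	var buf bytes.Buffer
	cmd.SetOut(&buf) // e.g. in a test

	if err := cmd.Execute(); err != nil {
		os.Exit(1)
	}

	fmt.Print(buf.String()) // prints "crawl log line"
}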
2 changes: 1 addition & 1 deletion pkg/cli/crawl/cmd.go
@@ -27,7 +27,7 @@ var Command = &cobra.Command{
 	modifier := crawler.RequestModifier{}
 	registerStandardCrawlCommandFlagModifiers(&modifier, crawlCommandFlags)
 
-	err := crawlUrls(args, modifier, crawlCommandFlags)
+	err := crawlUrls(args, modifier, crawlCommandFlags, cmd.OutOrStdout())
 	if err != nil {
 		fmt.Printf("Could not create crawlable URLs:\n\t%s\n", err)
 		os.Exit(1)
2 changes: 1 addition & 1 deletion pkg/cli/crawl/cmd_list.go
@@ -37,7 +37,7 @@ var ListCommand = &cobra.Command{
 		os.Exit(1)
 	}
 
-	err = crawlUrls(urls, modifier, listCommandFlags)
+	err = crawlUrls(urls, modifier, listCommandFlags, cmd.OutOrStdout())
 	if err != nil {
 		fmt.Printf("Could not create crawlable URLs:\n\t%s\n", err)
 		os.Exit(1)
2 changes: 1 addition & 1 deletion pkg/cli/crawl/cmd_sitemap.go
@@ -38,7 +38,7 @@ var SitemapCommand = &cobra.Command{
 		os.Exit(1)
 	}
 
-	err = crawlUrls(urls, modifier, sitemapCommandFlags)
+	err = crawlUrls(urls, modifier, sitemapCommandFlags, cmd.OutOrStdout())
 	if err != nil {
 		fmt.Printf("Could not create crawlable URLs:\n\t%s\n", err)
 		os.Exit(1)
4 changes: 3 additions & 1 deletion pkg/cli/crawl/commons.go
@@ -2,6 +2,7 @@ package crawl
 
 import (
 	"fmt"
+	"io"
 	"net/http"
 	"net/url"
 	"strings"
@@ -71,7 +72,7 @@ func registerStandardCrawlCommandFlagModifiers(modifier *crawler.RequestModifier
 	}
 }
 
-func crawlUrls(urls []string, modifier crawler.RequestModifier, flagOptions crawlerFlagOptions) error {
+func crawlUrls(urls []string, modifier crawler.RequestModifier, flagOptions crawlerFlagOptions, outWriter io.Writer) error {
 	requests, err := crawler.CreateRequestsFromUrls(urls, modifier)
 	if err != nil {
 		return err
@@ -90,6 +91,7 @@ func crawlUrls(urls []string, modifier crawler.RequestModifier, flagOptions craw
 			Timeout: flagOptions.HttpTimeout,
 		},
 		FilterStatusQuery: flagOptions.FilterStatusQuery,
+		OutWriter:         outWriter,
 	}
 	crawl.Crawl(requests)
 
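
With the extra parameter, the writer is threaded straight through to the Crawler. A hypothetical in-package test could now capture the crawl output; a sketch, assuming the zero value of crawlerFlagOptions yields a usable configuration (imports: bytes, testing; not part of this commit):

func TestCrawlOutput(t *testing.T) {
	var buf bytes.Buffer

	// outWriter is the fourth argument introduced by this commit.
	err := crawlUrls([]string{"https://example.com"}, crawler.RequestModifier{}, crawlerFlagOptions{}, &buf)
	if err != nil {
		t.Fatal(err)
	}

	t.Log(buf.String()) // the crawl log lines end up in buf, not on stdout
}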
2 changes: 2 additions & 0 deletions pkg/crawler/crawler.go
@@ -1,6 +1,7 @@
 package crawler
 
 import (
+	"io"
 	"net/http"
 	"sync"
 	"time"
@@ -14,6 +15,7 @@ type Crawler struct {
 	HttpClient        http.Client
 	NumberOfWorkers   int
 	FilterStatusQuery string
+	OutWriter         io.Writer
 	statusFilter      *filter.Filter
 	printMutex        sync.Mutex
 }
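
Since OutWriter is exported, any consumer of pkg/crawler can now choose the log destination, not just the CLI. A minimal construction sketch (field values are illustrative, not taken from this repo):

c := crawler.Crawler{
	HttpClient:        http.Client{Timeout: 10 * time.Second},
	NumberOfWorkers:   4,
	FilterStatusQuery: "",        // empty query assumed to mean "no filtering"
	OutWriter:         os.Stdout, // or any io.Writer, e.g. a *bytes.Buffer
}
c.Crawl(requests) // requests built via crawler.CreateRequestsFromUrls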
2 changes: 1 addition & 1 deletion pkg/crawler/log.go
@@ -17,7 +17,7 @@ func (c *Crawler) safePrintln(statusCode int, message string) {
 
 	if c.statusFilter.IsValid(c.FilterStatusQuery, int64(statusCode)) {
 		c.printMutex.Lock()
-		fmt.Println(message)
+		_, _ = fmt.Fprintln(c.OutWriter, message)
 		c.printMutex.Unlock()
 	}
 }
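
One caveat worth noting: nothing in the diff guards against a nil OutWriter; the CLI callers avoid this by always passing cmd.OutOrStdout(). If the crawler were used directly, a defensive fallback could look like this (hypothetical, not part of the commit):

func (c *Crawler) out() io.Writer {
	// Fall back to os.Stdout when no writer was injected.
	if c.OutWriter == nil {
		return os.Stdout
	}
	return c.OutWriter
}

// safePrintln would then write to c.out() instead of c.OutWriter.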
