diff options
| author | Philipp Tanlak <philipp.tanlak@gmail.com> | 2023-10-17 18:16:58 +0200 |
|---|---|---|
| committer | Philipp Tanlak <philipp.tanlak@gmail.com> | 2023-10-17 18:16:58 +0200 |
| commit | 11d73f57a80bb65b7507ec80433b8f035ed226c2 (patch) | |
| tree | 6613f9f58d4a91f7267f852e910b5b73015f4db5 | |
| parent | 36846200df6e53723149b5f23ae78913290f6f8d (diff) | |
Allow configuration of links to follow
| -rw-r--r-- | README.md | 1 | ||||
| -rw-r--r-- | modules/followlinks/followlinks.go | 67 | ||||
| -rw-r--r-- | modules/followlinks/followlinks_test.go | 96 |
3 files changed, 148 insertions, 16 deletions
@@ -140,6 +140,7 @@ Below is an example scraping script that showcases the capabilities of flyscrape export const config = { url: "https://example.com/", // Specify the URL to start scraping from. depth: 0, // Specify how deep links should be followed. (default = 0, no follow) + follow: [], // Specify the css selectors to follow (default = ["a[href]"]) allowedDomains: [], // Specify the allowed domains. ['*'] for all. (default = domain from url) blockedDomains: [], // Specify the blocked domains. (default = none) allowedURLs: [], // Specify the allowed URLs as regex. (default = all allowed) diff --git a/modules/followlinks/followlinks.go b/modules/followlinks/followlinks.go index c53f167..c1448be 100644 --- a/modules/followlinks/followlinks.go +++ b/modules/followlinks/followlinks.go @@ -6,6 +6,7 @@ package followlinks import ( "net/url" + "regexp" "strings" "github.com/PuerkitoBio/goquery" @@ -16,7 +17,9 @@ func init() { flyscrape.RegisterModule(Module{}) } -type Module struct{} +type Module struct { + Follow []string `json:"follow"` +} func (Module) ModuleInfo() flyscrape.ModuleInfo { return flyscrape.ModuleInfo{ @@ -25,13 +28,19 @@ func (Module) ModuleInfo() flyscrape.ModuleInfo { } } +func (m *Module) Provision(ctx flyscrape.Context) { + if len(m.Follow) == 0 { + m.Follow = []string{"a[href]"} + } +} + func (m *Module) ReceiveResponse(resp *flyscrape.Response) { - for _, link := range parseLinks(string(resp.Body), resp.Request.URL) { + for _, link := range m.parseLinks(string(resp.Body), resp.Request.URL) { resp.Visit(link) } } -func parseLinks(html string, origin string) []string { +func (m *Module) parseLinks(html string, origin string) []string { var links []string doc, err := goquery.NewDocumentFromReader(strings.NewReader(html)) if err != nil { @@ -44,22 +53,26 @@ func parseLinks(html string, origin string) []string { } uniqueLinks := make(map[string]bool) - doc.Find("a").Each(func(i int, s *goquery.Selection) { - link, _ := s.Attr("href") - parsedLink, 
err := originurl.Parse(link) + for _, selector := range m.Follow { + attr := parseSelectorAttr(selector) + doc.Find(selector).Each(func(i int, s *goquery.Selection) { + link, _ := s.Attr(attr) - if err != nil || !isValidLink(parsedLink) { - return - } + parsedLink, err := originurl.Parse(link) - absLink := parsedLink.String() + if err != nil || !isValidLink(parsedLink) { + return + } - if !uniqueLinks[absLink] { - links = append(links, absLink) - uniqueLinks[absLink] = true - } - }) + absLink := parsedLink.String() + + if !uniqueLinks[absLink] { + links = append(links, absLink) + uniqueLinks[absLink] = true + } + }) + } return links } @@ -72,4 +85,26 @@ func isValidLink(link *url.URL) bool { return true } -var _ flyscrape.ResponseReceiver = (*Module)(nil) +func parseSelectorAttr(sel string) string { + matches := selectorExpr.FindAllString(sel, -1) + if len(matches) == 0 { + return "href" + } + + attr := attrExpr.FindString(matches[len(matches)-1]) + if attr == "" { + return "href" + } + + return attr +} + +var ( + _ flyscrape.Provisioner = (*Module)(nil) + _ flyscrape.ResponseReceiver = (*Module)(nil) +) + +var ( + selectorExpr = regexp.MustCompile(`\[(.*?)\]`) + attrExpr = regexp.MustCompile(`[\w-]+`) +) diff --git a/modules/followlinks/followlinks_test.go b/modules/followlinks/followlinks_test.go index 0a628c3..f3eb4fe 100644 --- a/modules/followlinks/followlinks_test.go +++ b/modules/followlinks/followlinks_test.go @@ -47,3 +47,99 @@ func TestFollowLinks(t *testing.T) { require.Contains(t, urls, "http://www.google.com") require.Contains(t, urls, "http://www.google.com/baz") } + +func TestFollowSelector(t *testing.T) { + var urls []string + var mu sync.Mutex + + scraper := flyscrape.NewScraper() + scraper.LoadModule(&starturl.Module{URL: "http://www.example.com/foo/bar"}) + scraper.LoadModule(&followlinks.Module{ + Follow: []string{".next a[href]"}, + }) + + scraper.LoadModule(hook.Module{ + AdaptTransportFn: func(rt http.RoundTripper) http.RoundTripper { + 
return flyscrape.MockTransport(200, ` + <a href="/baz">Baz</a> + <a href="baz">Baz</a> + <div class="next"> + <a href="http://www.google.com">Google</a> + </div>`) + }, + ReceiveResponseFn: func(r *flyscrape.Response) { + mu.Lock() + urls = append(urls, r.Request.URL) + mu.Unlock() + }, + }) + + scraper.Run() + + require.Len(t, urls, 2) + require.Contains(t, urls, "http://www.example.com/foo/bar") + require.Contains(t, urls, "http://www.google.com") +} + +func TestFollowDataAttr(t *testing.T) { + var urls []string + var mu sync.Mutex + + scraper := flyscrape.NewScraper() + scraper.LoadModule(&starturl.Module{URL: "http://www.example.com/foo/bar"}) + scraper.LoadModule(&followlinks.Module{ + Follow: []string{"[data-url]"}, + }) + + scraper.LoadModule(hook.Module{ + AdaptTransportFn: func(rt http.RoundTripper) http.RoundTripper { + return flyscrape.MockTransport(200, ` + <a href="/baz">Baz</a> + <a href="baz">Baz</a> + <div data-url="http://www.google.com">Google</div>`) + }, + ReceiveResponseFn: func(r *flyscrape.Response) { + mu.Lock() + urls = append(urls, r.Request.URL) + mu.Unlock() + }, + }) + + scraper.Run() + + require.Len(t, urls, 2) + require.Contains(t, urls, "http://www.example.com/foo/bar") + require.Contains(t, urls, "http://www.google.com") +} + +func TestFollowMultiple(t *testing.T) { + var urls []string + var mu sync.Mutex + + scraper := flyscrape.NewScraper() + scraper.LoadModule(&starturl.Module{URL: "http://www.example.com/foo/bar"}) + scraper.LoadModule(&followlinks.Module{ + Follow: []string{"a.prev", "a.next"}, + }) + + scraper.LoadModule(hook.Module{ + AdaptTransportFn: func(rt http.RoundTripper) http.RoundTripper { + return flyscrape.MockTransport(200, ` + <a href="/baz">Baz</a> + <a class="prev" href="a">a</a> + <a class="next" href="b">b</a>`) + }, + ReceiveResponseFn: func(r *flyscrape.Response) { + mu.Lock() + urls = append(urls, r.Request.URL) + mu.Unlock() + }, + }) + + scraper.Run() + + require.Len(t, urls, 3) + require.Contains(t, 
urls, "http://www.example.com/foo/bar") + require.Contains(t, urls, "http://www.example.com/foo/a") + require.Contains(t, urls, "http://www.example.com/foo/b") +} |