author    Philipp Tanlak <philipp.tanlak@gmail.com>  2023-10-17 18:16:58 +0200
committer Philipp Tanlak <philipp.tanlak@gmail.com>  2023-10-17 18:16:58 +0200
commit    11d73f57a80bb65b7507ec80433b8f035ed226c2 (patch)
tree      6613f9f58d4a91f7267f852e910b5b73015f4db5 /modules/followlinks
parent    36846200df6e53723149b5f23ae78913290f6f8d (diff)
Allow configuration of links to follow
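
The module now takes a list of CSS selectors in its Follow field; Provision defaults it to "a[href]" when the list is empty, so the previous behaviour of following every anchor tag is preserved. A bracketed part of a selector (for example "[data-url]") names the attribute to read, otherwise "href" is used. The sketch below mirrors the new tests to show how the option could be wired up; the import paths and the main wrapper are assumptions for illustration, not part of this change:

    package main

    import (
        "fmt"

        // Import paths assumed from the repository layout (philippta/flyscrape);
        // they are not shown in this diff.
        "github.com/philippta/flyscrape"
        "github.com/philippta/flyscrape/modules/followlinks"
        "github.com/philippta/flyscrape/modules/hook"
        "github.com/philippta/flyscrape/modules/starturl"
    )

    func main() {
        scraper := flyscrape.NewScraper()
        scraper.LoadModule(&starturl.Module{URL: "http://www.example.com/"})

        // Only follow links found via these selectors. Leaving Follow empty
        // makes Provision fall back to "a[href]", i.e. every anchor tag.
        scraper.LoadModule(&followlinks.Module{
            Follow: []string{".next a[href]", "[data-url]"},
        })

        // Print every URL the scraper visits, as the tests below do.
        scraper.LoadModule(hook.Module{
            ReceiveResponseFn: func(r *flyscrape.Response) {
                fmt.Println(r.Request.URL)
            },
        })

        scraper.Run()
    }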
Diffstat (limited to 'modules/followlinks')
 -rw-r--r--  modules/followlinks/followlinks.go       | 67
 -rw-r--r--  modules/followlinks/followlinks_test.go  | 96
 2 files changed, 147 insertions, 16 deletions
diff --git a/modules/followlinks/followlinks.go b/modules/followlinks/followlinks.go
index c53f167..c1448be 100644
--- a/modules/followlinks/followlinks.go
+++ b/modules/followlinks/followlinks.go
@@ -6,6 +6,7 @@ package followlinks
 import (
     "net/url"
+    "regexp"
     "strings"
 
     "github.com/PuerkitoBio/goquery"
@@ -16,7 +17,9 @@ func init() {
     flyscrape.RegisterModule(Module{})
 }
 
-type Module struct{}
+type Module struct {
+    Follow []string `json:"follow"`
+}
 
 func (Module) ModuleInfo() flyscrape.ModuleInfo {
     return flyscrape.ModuleInfo{
@@ -25,13 +28,19 @@ func (Module) ModuleInfo() flyscrape.ModuleInfo {
     }
 }
 
+func (m *Module) Provision(ctx flyscrape.Context) {
+    if len(m.Follow) == 0 {
+        m.Follow = []string{"a[href]"}
+    }
+}
+
 func (m *Module) ReceiveResponse(resp *flyscrape.Response) {
-    for _, link := range parseLinks(string(resp.Body), resp.Request.URL) {
+    for _, link := range m.parseLinks(string(resp.Body), resp.Request.URL) {
         resp.Visit(link)
     }
 }
 
-func parseLinks(html string, origin string) []string {
+func (m *Module) parseLinks(html string, origin string) []string {
     var links []string
     doc, err := goquery.NewDocumentFromReader(strings.NewReader(html))
     if err != nil {
@@ -44,22 +53,26 @@ func parseLinks(html string, origin string) []string {
     }
 
     uniqueLinks := make(map[string]bool)
-    doc.Find("a").Each(func(i int, s *goquery.Selection) {
-        link, _ := s.Attr("href")
-        parsedLink, err := originurl.Parse(link)
+    for _, selector := range m.Follow {
+        attr := parseSelectorAttr(selector)
+        doc.Find(selector).Each(func(i int, s *goquery.Selection) {
+            link, _ := s.Attr(attr)
 
-        if err != nil || !isValidLink(parsedLink) {
-            return
-        }
+            parsedLink, err := originurl.Parse(link)
 
-        absLink := parsedLink.String()
+            if err != nil || !isValidLink(parsedLink) {
+                return
+            }
 
-        if !uniqueLinks[absLink] {
-            links = append(links, absLink)
-            uniqueLinks[absLink] = true
-        }
-    })
+            absLink := parsedLink.String()
+
+            if !uniqueLinks[absLink] {
+                links = append(links, absLink)
+                uniqueLinks[absLink] = true
+            }
+        })
+    }
 
     return links
 }
@@ -72,4 +85,26 @@ func isValidLink(link *url.URL) bool {
     return true
 }
 
-var _ flyscrape.ResponseReceiver = (*Module)(nil)
+func parseSelectorAttr(sel string) string {
+    matches := selectorExpr.FindAllString(sel, -1)
+    if len(matches) == 0 {
+        return "href"
+    }
+
+    attr := attrExpr.FindString(matches[len(matches)-1])
+    if attr == "" {
+        return "href"
+    }
+
+    return attr
+}
+
+var (
+    _ flyscrape.Provisioner      = (*Module)(nil)
+    _ flyscrape.ResponseReceiver = (*Module)(nil)
+)
+
+var (
+    selectorExpr = regexp.MustCompile(`\[(.*?)\]`)
+    attrExpr     = regexp.MustCompile(`[\w-]+`)
+)
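
For reference, the attribute lookup in parseSelectorAttr is purely regex based: the last bracketed [...] part of a selector names the attribute to read, and selectors without brackets fall back to "href". Below is a self-contained sketch of that behaviour; the function body and both regular expressions are copied from the diff above, while the main wrapper exists only for illustration:

    package main

    import (
        "fmt"
        "regexp"
    )

    // Same expressions as above: selectorExpr grabs each bracketed part of a
    // CSS selector, attrExpr pulls the attribute name out of the last one.
    var (
        selectorExpr = regexp.MustCompile(`\[(.*?)\]`)
        attrExpr     = regexp.MustCompile(`[\w-]+`)
    )

    // parseSelectorAttr returns the attribute named in the last [...] of the
    // selector, falling back to "href".
    func parseSelectorAttr(sel string) string {
        matches := selectorExpr.FindAllString(sel, -1)
        if len(matches) == 0 {
            return "href"
        }

        attr := attrExpr.FindString(matches[len(matches)-1])
        if attr == "" {
            return "href"
        }

        return attr
    }

    func main() {
        fmt.Println(parseSelectorAttr("a[href]"))       // href
        fmt.Println(parseSelectorAttr(".next a[href]")) // href
        fmt.Println(parseSelectorAttr("[data-url]"))    // data-url
        fmt.Println(parseSelectorAttr("a.prev"))        // href (no attribute given)
        fmt.Println(parseSelectorAttr(`a[rel="next"]`)) // rel (quoted value is not part of the name)
    }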
diff --git a/modules/followlinks/followlinks_test.go b/modules/followlinks/followlinks_test.go
index 0a628c3..f3eb4fe 100644
--- a/modules/followlinks/followlinks_test.go
+++ b/modules/followlinks/followlinks_test.go
@@ -47,3 +47,99 @@ func TestFollowLinks(t *testing.T) {
     require.Contains(t, urls, "http://www.google.com")
     require.Contains(t, urls, "http://www.google.com/baz")
 }
+
+func TestFollowSelector(t *testing.T) {
+    var urls []string
+    var mu sync.Mutex
+
+    scraper := flyscrape.NewScraper()
+    scraper.LoadModule(&starturl.Module{URL: "http://www.example.com/foo/bar"})
+    scraper.LoadModule(&followlinks.Module{
+        Follow: []string{".next a[href]"},
+    })
+
+    scraper.LoadModule(hook.Module{
+        AdaptTransportFn: func(rt http.RoundTripper) http.RoundTripper {
+            return flyscrape.MockTransport(200, `
+                <a href="/baz">Baz</a>
+                <a href="baz">Baz</a>
+                <div class="next">
+                    <a href="http://www.google.com">Google</a>
+                </div>`)
+        },
+        ReceiveResponseFn: func(r *flyscrape.Response) {
+            mu.Lock()
+            urls = append(urls, r.Request.URL)
+            mu.Unlock()
+        },
+    })
+
+    scraper.Run()
+
+    require.Len(t, urls, 2)
+    require.Contains(t, urls, "http://www.example.com/foo/bar")
+    require.Contains(t, urls, "http://www.google.com")
+}
+
+func TestFollowDataAttr(t *testing.T) {
+    var urls []string
+    var mu sync.Mutex
+
+    scraper := flyscrape.NewScraper()
+    scraper.LoadModule(&starturl.Module{URL: "http://www.example.com/foo/bar"})
+    scraper.LoadModule(&followlinks.Module{
+        Follow: []string{"[data-url]"},
+    })
+
+    scraper.LoadModule(hook.Module{
+        AdaptTransportFn: func(rt http.RoundTripper) http.RoundTripper {
+            return flyscrape.MockTransport(200, `
+                <a href="/baz">Baz</a>
+                <a href="baz">Baz</a>
+                <div data-url="http://www.google.com">Google</div>`)
+        },
+        ReceiveResponseFn: func(r *flyscrape.Response) {
+            mu.Lock()
+            urls = append(urls, r.Request.URL)
+            mu.Unlock()
+        },
+    })
+
+    scraper.Run()
+
+    require.Len(t, urls, 2)
+    require.Contains(t, urls, "http://www.example.com/foo/bar")
+    require.Contains(t, urls, "http://www.google.com")
+}
+
+func TestFollowMultiple(t *testing.T) {
+    var urls []string
+    var mu sync.Mutex
+
+    scraper := flyscrape.NewScraper()
+    scraper.LoadModule(&starturl.Module{URL: "http://www.example.com/foo/bar"})
+    scraper.LoadModule(&followlinks.Module{
+        Follow: []string{"a.prev", "a.next"},
+    })
+
+    scraper.LoadModule(hook.Module{
+        AdaptTransportFn: func(rt http.RoundTripper) http.RoundTripper {
+            return flyscrape.MockTransport(200, `
+                <a href="/baz">Baz</a>
+                <a class="prev" href="a">a</a>
+                <a class="next" href="b">b</a>`)
+        },
+        ReceiveResponseFn: func(r *flyscrape.Response) {
+            mu.Lock()
+            urls = append(urls, r.Request.URL)
+            mu.Unlock()
+        },
+    })
+
+    scraper.Run()
+
+    require.Len(t, urls, 3)
+    require.Contains(t, urls, "http://www.example.com/foo/bar")
+    require.Contains(t, urls, "http://www.example.com/foo/a")
+    require.Contains(t, urls, "http://www.example.com/foo/b")
+}
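
Taken together, the new tests pin down the three behaviours the option is meant to provide: restricting link discovery to a selector (TestFollowSelector), reading an attribute other than href when one is named in brackets (TestFollowDataAttr), and combining several selectors (TestFollowMultiple). Judging by the json:"follow" struct tag, the same list should be expressible under a follow key in a scrape configuration, e.g. "follow": [".next a[href]", "[data-url]"], though no configuration handling is touched in this commit.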