author     Philipp Tanlak <philipp.tanlak@gmail.com>    2023-09-23 17:41:57 +0200
committer  Philipp Tanlak <philipp.tanlak@gmail.com>    2023-09-23 17:41:57 +0200
commit     08df9258a532b653c243e077e82491dbe62ad854 (patch)
tree       e72b04dba61e65d3bfb9cdb0ad3a87f5caa95eb3 /modules/urlfilter
parent     c6950bcd5cd8fe9e7cc63fde7216a5a9b93b8aa0 (diff)
refactor scraper into modules
Diffstat (limited to 'modules/urlfilter')
-rw-r--r--  modules/urlfilter/urlfilter.go       85
-rw-r--r--  modules/urlfilter/urlfilter_test.go  71
2 files changed, 156 insertions(+), 0 deletions(-)
diff --git a/modules/urlfilter/urlfilter.go b/modules/urlfilter/urlfilter.go
new file mode 100644
index 0000000..14576f0
--- /dev/null
+++ b/modules/urlfilter/urlfilter.go
@@ -0,0 +1,85 @@
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+package urlfilter
+
+import (
+ "regexp"
+
+ "github.com/philippta/flyscrape"
+)
+
+func init() {
+ flyscrape.RegisterModule(new(Module))
+}
+
+type Module struct {
+ URL string `json:"url"`
+ AllowedURLs []string `json:"allowedURLs"`
+ BlockedURLs []string `json:"blockedURLs"`
+
+ allowedURLsRE []*regexp.Regexp
+ blockedURLsRE []*regexp.Regexp
+}
+
+func (m *Module) ID() string {
+ return "urlfilter"
+}
+
+func (m *Module) OnLoad(v flyscrape.Visitor) {
+ for _, pat := range m.AllowedURLs {
+ re, err := regexp.Compile(pat)
+ if err != nil {
+ continue
+ }
+ m.allowedURLsRE = append(m.allowedURLsRE, re)
+ }
+
+ for _, pat := range m.BlockedURLs {
+ re, err := regexp.Compile(pat)
+ if err != nil {
+ continue
+ }
+ m.blockedURLsRE = append(m.blockedURLsRE, re)
+ }
+}
+
+func (m *Module) CanRequest(rawurl string, depth int) bool {
+ // allow root url
+ if rawurl == m.URL {
+ return true
+ }
+
+ // allow if no filter is set
+ if len(m.allowedURLsRE) == 0 && len(m.blockedURLsRE) == 0 {
+ return true
+ }
+
+ ok := false
+ if len(m.allowedURLsRE) == 0 {
+ ok = true
+ }
+
+ for _, re := range m.allowedURLsRE {
+ if re.MatchString(rawurl) {
+ ok = true
+ break
+ }
+ }
+
+ for _, re := range m.blockedURLsRE {
+ if re.MatchString(rawurl) {
+ ok = false
+ break
+ }
+ }
+
+ return ok
+}
+
+var (
+ _ flyscrape.Module = (*Module)(nil)
+ _ flyscrape.CanRequest = (*Module)(nil)
+ _ flyscrape.OnLoad = (*Module)(nil)
+)
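Note on the filter rule in CanRequest above: a URL passes if it matches any allowed pattern (or no allow list is configured) and is then rejected if it matches any blocked pattern. The snippet below is a minimal standalone sketch of that precedence using only the standard library; the names (matchesAny, the sample patterns and URLs) are illustrative and not part of the module.

package main

import (
	"fmt"
	"regexp"
)

// matchesAny reports whether rawurl matches any of the compiled patterns.
func matchesAny(res []*regexp.Regexp, rawurl string) bool {
	for _, re := range res {
		if re.MatchString(rawurl) {
			return true
		}
	}
	return false
}

func main() {
	allowed := []*regexp.Regexp{regexp.MustCompile(`/foo\?id=\d+`)}
	blocked := []*regexp.Regexp{regexp.MustCompile(`/bar$`)}

	for _, u := range []string{
		"http://www.example.com/foo?id=123", // passes: matches the allow list
		"http://www.example.com/foo?id=ABC", // rejected: matches no allow pattern
		"http://www.example.com/bar",        // rejected: matches the block list
	} {
		// Allow if no allow list is set or any allow pattern matches,
		// then reject if any block pattern matches.
		ok := len(allowed) == 0 || matchesAny(allowed, u)
		if matchesAny(blocked, u) {
			ok = false
		}
		fmt.Println(u, ok)
	}
}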
diff --git a/modules/urlfilter/urlfilter_test.go b/modules/urlfilter/urlfilter_test.go
new file mode 100644
index 0000000..e383a32
--- /dev/null
+++ b/modules/urlfilter/urlfilter_test.go
@@ -0,0 +1,71 @@
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+package urlfilter_test
+
+import (
+ "testing"
+
+ "github.com/philippta/flyscrape"
+ "github.com/philippta/flyscrape/modules/followlinks"
+ "github.com/philippta/flyscrape/modules/starturl"
+ "github.com/philippta/flyscrape/modules/urlfilter"
+ "github.com/stretchr/testify/require"
+)
+
+func TestURLFilterAllowed(t *testing.T) {
+ scraper := flyscrape.NewScraper()
+ scraper.LoadModule(&starturl.Module{URL: "http://www.example.com/"})
+ scraper.LoadModule(&followlinks.Module{})
+ scraper.LoadModule(&urlfilter.Module{
+ URL: "http://www.example.com/",
+ AllowedURLs: []string{`/foo\?id=\d+`, `/bar$`},
+ })
+
+ scraper.SetTransport(flyscrape.MockTransport(200, `
+ <a href="foo?id=123">123</a>
+ <a href="foo?id=ABC">ABC</a>
+ <a href="/bar">bar</a>
+ <a href="/barz">barz</a>`))
+
+ var urls []string
+ scraper.OnRequest(func(req *flyscrape.Request) {
+ urls = append(urls, req.URL)
+ })
+
+ scraper.Run()
+
+ require.Len(t, urls, 3)
+ require.Contains(t, urls, "http://www.example.com/")
+ require.Contains(t, urls, "http://www.example.com/foo?id=123")
+ require.Contains(t, urls, "http://www.example.com/bar")
+}
+
+func TestURLFilterBlocked(t *testing.T) {
+ scraper := flyscrape.NewScraper()
+ scraper.LoadModule(&starturl.Module{URL: "http://www.example.com/"})
+ scraper.LoadModule(&followlinks.Module{})
+ scraper.LoadModule(&urlfilter.Module{
+ URL: "http://www.example.com/",
+ BlockedURLs: []string{`/foo\?id=\d+`, `/bar$`},
+ })
+
+ scraper.SetTransport(flyscrape.MockTransport(200, `
+ <a href="foo?id=123">123</a>
+ <a href="foo?id=ABC">ABC</a>
+ <a href="/bar">bar</a>
+ <a href="/barz">barz</a>`))
+
+ var urls []string
+ scraper.OnRequest(func(req *flyscrape.Request) {
+ urls = append(urls, req.URL)
+ })
+
+ scraper.Run()
+
+ require.Len(t, urls, 3)
+ require.Contains(t, urls, "http://www.example.com/")
+ require.Contains(t, urls, "http://www.example.com/foo?id=ABC")
+ require.Contains(t, urls, "http://www.example.com/barz")
+}