From 08df9258a532b653c243e077e82491dbe62ad854 Mon Sep 17 00:00:00 2001
From: Philipp Tanlak
Date: Sat, 23 Sep 2023 17:41:57 +0200
Subject: refactor scraper into modules

---
 modules/domainfilter/domainfilter.go      | 62 +++++++++++++++++++++
 modules/domainfilter/domainfilter_test.go | 92 +++++++++++++++++++++++++++++++
 2 files changed, 154 insertions(+)
 create mode 100644 modules/domainfilter/domainfilter.go
 create mode 100644 modules/domainfilter/domainfilter_test.go

diff --git a/modules/domainfilter/domainfilter.go b/modules/domainfilter/domainfilter.go
new file mode 100644
index 0000000..b892882
--- /dev/null
+++ b/modules/domainfilter/domainfilter.go
@@ -0,0 +1,62 @@
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+package domainfilter
+
+import (
+	"github.com/nlnwa/whatwg-url/url"
+	"github.com/philippta/flyscrape"
+)
+
+func init() {
+	flyscrape.RegisterModule(new(Module))
+}
+
+type Module struct {
+	URL            string   `json:"url"`
+	AllowedDomains []string `json:"allowedDomains"`
+	BlockedDomains []string `json:"blockedDomains"`
+}
+
+func (m *Module) ID() string {
+	return "domainfilter"
+}
+
+func (m *Module) OnLoad(v flyscrape.Visitor) {
+	if u, err := url.Parse(m.URL); err == nil {
+		m.AllowedDomains = append(m.AllowedDomains, u.Host())
+	}
+}
+
+func (m *Module) CanRequest(rawurl string, depth int) bool {
+	u, err := url.Parse(rawurl)
+	if err != nil {
+		return false
+	}
+
+	host := u.Host()
+	ok := false
+
+	for _, domain := range m.AllowedDomains {
+		if domain == "*" || host == domain {
+			ok = true
+			break
+		}
+	}
+
+	for _, domain := range m.BlockedDomains {
+		if host == domain {
+			ok = false
+			break
+		}
+	}
+
+	return ok
+}
+
+var (
+	_ flyscrape.Module     = (*Module)(nil)
+	_ flyscrape.CanRequest = (*Module)(nil)
+	_ flyscrape.OnLoad     = (*Module)(nil)
+)
diff --git a/modules/domainfilter/domainfilter_test.go b/modules/domainfilter/domainfilter_test.go
new file mode 100644
index 0000000..97bdc9c
--- /dev/null
+++ b/modules/domainfilter/domainfilter_test.go
@@ -0,0 +1,92 @@
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+package domainfilter_test
+
+import (
+	"testing"
+
+	"github.com/philippta/flyscrape"
+	"github.com/philippta/flyscrape/modules/domainfilter"
+	"github.com/philippta/flyscrape/modules/followlinks"
+	"github.com/philippta/flyscrape/modules/starturl"
+	"github.com/stretchr/testify/require"
+)
+
+func TestDomainfilterAllowed(t *testing.T) {
+	scraper := flyscrape.NewScraper()
+	scraper.LoadModule(&starturl.Module{URL: "http://www.example.com"})
+	scraper.LoadModule(&followlinks.Module{})
+	scraper.LoadModule(&domainfilter.Module{
+		URL:            "http://www.example.com",
+		AllowedDomains: []string{"www.google.com"},
+	})
+
+	scraper.SetTransport(flyscrape.MockTransport(200, `
+		<a href="http://www.google.com">Google</a>
+		<a href="http://www.duckduckgo.com">DuckDuckGo</a>`))
+
+	var urls []string
+	scraper.OnRequest(func(req *flyscrape.Request) {
+		urls = append(urls, req.URL)
+	})
+
+	scraper.Run()
+
+	require.Len(t, urls, 2)
+	require.Contains(t, urls, "http://www.example.com")
+	require.Contains(t, urls, "http://www.google.com/")
+}
+
+func TestDomainfilterAllowedAll(t *testing.T) {
+	scraper := flyscrape.NewScraper()
+	scraper.LoadModule(&starturl.Module{URL: "http://www.example.com"})
+	scraper.LoadModule(&followlinks.Module{})
+	scraper.LoadModule(&domainfilter.Module{
+		URL:            "http://www.example.com",
+		AllowedDomains: []string{"*"},
+	})
+
+	scraper.SetTransport(flyscrape.MockTransport(200, `
+		<a href="http://www.google.com">Google</a>
+		<a href="http://www.duckduckgo.com">DuckDuckGo</a>`))
+
+	var urls []string
+	scraper.OnRequest(func(req *flyscrape.Request) {
+		urls = append(urls, req.URL)
+	})
+
+	scraper.Run()
+
+	require.Len(t, urls, 3)
+	require.Contains(t, urls, "http://www.example.com")
+	require.Contains(t, urls, "http://www.duckduckgo.com/")
+	require.Contains(t, urls, "http://www.google.com/")
+}
+
+func TestDomainfilterBlocked(t *testing.T) {
+	scraper := flyscrape.NewScraper()
+	scraper.LoadModule(&starturl.Module{URL: "http://www.example.com"})
+	scraper.LoadModule(&followlinks.Module{})
+	scraper.LoadModule(&domainfilter.Module{
+		URL:            "http://www.example.com",
+		AllowedDomains: []string{"*"},
+		BlockedDomains: []string{"www.google.com"},
+	})
+
+	scraper.SetTransport(flyscrape.MockTransport(200, `
+		<a href="http://www.google.com">Google</a>
+		<a href="http://www.duckduckgo.com">DuckDuckGo</a>`))
+
+	var urls []string
+	scraper.OnRequest(func(req *flyscrape.Request) {
+		urls = append(urls, req.URL)
+	})
+
+	scraper.Run()
+
+	require.Len(t, urls, 2)
+	require.Contains(t, urls, "http://www.example.com")
+	require.Contains(t, urls, "http://www.duckduckgo.com/")
+}
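
For illustration only, here is a minimal standalone sketch of the filtering rules the tests above exercise. It calls the module's CanRequest method directly rather than going through the scraper pipeline; the hosts and URLs are illustrative, and only the exported API visible in this patch (the Module struct and CanRequest) is assumed.

package main

import (
	"fmt"

	"github.com/philippta/flyscrape/modules/domainfilter"
)

func main() {
	// Wildcard allow list plus an explicit block list, mirroring TestDomainfilterBlocked.
	m := &domainfilter.Module{
		AllowedDomains: []string{"*"},
		BlockedDomains: []string{"www.google.com"},
	}

	fmt.Println(m.CanRequest("http://www.duckduckgo.com/", 0)) // true: every host matches "*"
	fmt.Println(m.CanRequest("http://www.google.com/", 0))     // false: host is on the block list
	fmt.Println(m.CanRequest("://not-a-url", 0))               // false: unparsable URLs are rejected
}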