summaryrefslogtreecommitdiff
path: root/modules/domainfilter/domainfilter_test.go
diff options
context:
space:
mode:
authorPhilipp Tanlak <philipp.tanlak@gmail.com>2023-09-23 17:41:57 +0200
committerPhilipp Tanlak <philipp.tanlak@gmail.com>2023-09-23 17:41:57 +0200
commit08df9258a532b653c243e077e82491dbe62ad854 (patch)
treee72b04dba61e65d3bfb9cdb0ad3a87f5caa95eb3 /modules/domainfilter/domainfilter_test.go
parentc6950bcd5cd8fe9e7cc63fde7216a5a9b93b8aa0 (diff)
refactor scraper into modules
Diffstat (limited to 'modules/domainfilter/domainfilter_test.go')
-rw-r--r--modules/domainfilter/domainfilter_test.go92
1 files changed, 92 insertions, 0 deletions
diff --git a/modules/domainfilter/domainfilter_test.go b/modules/domainfilter/domainfilter_test.go
new file mode 100644
index 0000000..97bdc9c
--- /dev/null
+++ b/modules/domainfilter/domainfilter_test.go
@@ -0,0 +1,92 @@
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+package domainfilter_test
+
+import (
+ "testing"
+
+ "github.com/philippta/flyscrape"
+ "github.com/philippta/flyscrape/modules/domainfilter"
+ "github.com/philippta/flyscrape/modules/followlinks"
+ "github.com/philippta/flyscrape/modules/starturl"
+ "github.com/stretchr/testify/require"
+)
+
+// TestDomainfilterAllowed checks that with an explicit AllowedDomains list,
+// only links on listed domains are followed; the start URL itself is always
+// requested regardless of the allow list.
+func TestDomainfilterAllowed(t *testing.T) {
+	s := flyscrape.NewScraper()
+	s.LoadModule(&starturl.Module{URL: "http://www.example.com"})
+	s.LoadModule(&followlinks.Module{})
+	s.LoadModule(&domainfilter.Module{
+		URL:            "http://www.example.com",
+		AllowedDomains: []string{"www.google.com"},
+	})
+
+	// Record every URL the scraper attempts to request.
+	var visited []string
+	s.OnRequest(func(req *flyscrape.Request) {
+		visited = append(visited, req.URL)
+	})
+
+	s.SetTransport(flyscrape.MockTransport(200, `
+	<a href="http://www.google.com">Google</a>
+	<a href="http://www.duckduckgo.com">DuckDuckGo</a>`))
+
+	s.Run()
+
+	// Start URL plus the one allowed link; duckduckgo.com is filtered out.
+	require.Len(t, visited, 2)
+	require.Contains(t, visited, "http://www.example.com")
+	require.Contains(t, visited, "http://www.google.com/")
+}
+
+// TestDomainfilterAllowedAll checks that the "*" wildcard in AllowedDomains
+// lets the scraper follow links to every domain.
+func TestDomainfilterAllowedAll(t *testing.T) {
+	s := flyscrape.NewScraper()
+	s.LoadModule(&starturl.Module{URL: "http://www.example.com"})
+	s.LoadModule(&followlinks.Module{})
+	s.LoadModule(&domainfilter.Module{
+		URL:            "http://www.example.com",
+		AllowedDomains: []string{"*"},
+	})
+
+	// Record every URL the scraper attempts to request.
+	var visited []string
+	s.OnRequest(func(req *flyscrape.Request) {
+		visited = append(visited, req.URL)
+	})
+
+	s.SetTransport(flyscrape.MockTransport(200, `
+	<a href="http://www.google.com">Google</a>
+	<a href="http://www.duckduckgo.com">DuckDuckGo</a>`))
+
+	s.Run()
+
+	// Start URL plus both discovered links: nothing is filtered.
+	require.Len(t, visited, 3)
+	require.Contains(t, visited, "http://www.example.com")
+	require.Contains(t, visited, "http://www.duckduckgo.com/")
+	require.Contains(t, visited, "http://www.google.com/")
+}
+
+// TestDomainfilterBlocked checks that BlockedDomains takes precedence over a
+// wildcard allow list: a domain listed there is never followed even though
+// "*" allows everything else.
+func TestDomainfilterBlocked(t *testing.T) {
+	s := flyscrape.NewScraper()
+	s.LoadModule(&starturl.Module{URL: "http://www.example.com"})
+	s.LoadModule(&followlinks.Module{})
+	s.LoadModule(&domainfilter.Module{
+		URL:            "http://www.example.com",
+		AllowedDomains: []string{"*"},
+		BlockedDomains: []string{"www.google.com"},
+	})
+
+	// Record every URL the scraper attempts to request.
+	var visited []string
+	s.OnRequest(func(req *flyscrape.Request) {
+		visited = append(visited, req.URL)
+	})
+
+	s.SetTransport(flyscrape.MockTransport(200, `
+	<a href="http://www.google.com">Google</a>
+	<a href="http://www.duckduckgo.com">DuckDuckGo</a>`))
+
+	s.Run()
+
+	// Start URL plus duckduckgo.com; the blocked google.com link is dropped.
+	require.Len(t, visited, 2)
+	require.Contains(t, visited, "http://www.example.com")
+	require.Contains(t, visited, "http://www.duckduckgo.com/")
+}