-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathchallenge01_test.go
63 lines (48 loc) · 1.75 KB
/
challenge01_test.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
package challenge01
import (
"testing"
"time"
"dojo/challenge01/fetcher"
"github.com/stretchr/testify/assert"
)
// Sanity check: a freshly constructed Crawler must be able to walk the
// fake site without panicking. If this test fails, the Crawler itself is broken.
func Test_Challenge01_00_Basic_Functionality(t *testing.T) {
	fake := fetcher.Fake()
	crawler := New()
	crawler.Crawl("https://golang.org/", 4, fake)
}
// The Distinct fetcher panics if any URL is requested more than once, so the
// Crawler has to remember which URLs it already visited and skip duplicates.
func Test_Challenge01_01_Do_not_Fetch_URLs_Twice(t *testing.T) {
	distinct := fetcher.Distinct()
	crawler := New()
	crawler.Crawl("https://golang.org/", 4, distinct)
	assert.True(t, distinct.Completed(), "Not all URLs fetched")
}
// The Slow fetcher also panics on duplicate fetches and additionally delays
// every request. The crawl therefore has to run concurrently: a sequential
// implementation will not finish before the 5-second deadline below.
func Test_Challenge01_02_Be_More_Efficient(t *testing.T) {
	slow := fetcher.Slow()

	// Run the crawl in the background and signal completion by closing
	// the channel, so we can race it against a timeout.
	finished := make(chan struct{})
	go func() {
		crawler := New()
		crawler.Crawl("https://golang.org/", 4, slow)
		close(finished)
	}()

	select {
	case <-finished:
		// crawl completed within the deadline
	case <-time.After(5 * time.Second):
		assert.Fail(t, "too slow!")
	}
	assert.True(t, slow.Completed(), "Not all URLs fetched")
}
// The RateLimited fetcher simulates a server that fails any client fetching
// more often than once per second. A rate-limited Crawler built via
// NewWithRateLimit must throttle its requests accordingly while still
// visiting every URL.
func Test_Challenge01_03_RateLimit_Requests(t *testing.T) {
	limited := fetcher.RateLimited()
	crawler := NewWithRateLimit(2 * time.Second)
	crawler.Crawl("https://golang.org/", 4, limited)
	assert.True(t, limited.Completed(), "Not all URLs fetched")
}