crawlPage.go
package main

import (
	"fmt"
	"net/url"
	"time"
)

// crawlPage recursively crawls pages on the same host as rawBaseURL,
// counting visits to each normalized URL in the pages map.
func crawlPage(rawBaseURL, rawCurrentURL string, pages map[string]int, depth int) map[string]int {
	// Stop recursing once the depth budget is exhausted.
	if depth == 0 {
		return pages
	}

	base, err := url.Parse(rawBaseURL)
	if err != nil {
		fmt.Printf("error parsing base URL during crawl: %v\n", err)
		return pages
	}
	current, err := url.Parse(rawCurrentURL)
	if err != nil {
		fmt.Printf("error parsing current URL during crawl: %v\n", err)
		return pages
	}

	// Stay on the same host; skip external links.
	if base.Host != current.Host {
		return pages
	}

	normalizedCurrentURL, err := normalizeURL(rawCurrentURL)
	if err != nil {
		fmt.Printf("error normalizing current URL: %v\n", err)
		return pages
	}

	// If the page has been visited before, bump its count and stop;
	// otherwise record the first visit and keep crawling.
	if _, ok := pages[normalizedCurrentURL]; ok {
		pages[normalizedCurrentURL]++
		return pages
	}
	pages[normalizedCurrentURL] = 1

	// Print what URL we are crawling and sleep for some time
	// so we don't overwork the server.
	fmt.Printf("Crawling %s\n", normalizedCurrentURL)
	time.Sleep(500 * time.Millisecond)

	currentHTML, err := getHTML(normalizedCurrentURL)
	if err != nil {
		fmt.Printf("error getting HTML for current URL: %v\n", err)
		return pages
	}
	allURLs, err := getURLsFromHTML(currentHTML, rawBaseURL)
	if err != nil {
		fmt.Printf("error getting all URLs from HTML of current URL: %v\n", err)
		return pages
	}

	// Recurse into every discovered link with one less level of depth.
	// The loop variable is named link rather than url so it does not
	// shadow the net/url package.
	for _, link := range allURLs {
		crawlPage(rawBaseURL, link, pages, depth-1)
	}
	return pages
}
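
// Hypothetical usage sketch (not part of the original file): it assumes the
// helpers normalizeURL, getHTML, and getURLsFromHTML are defined elsewhere in
// package main, and that an entry point like the one below drives the crawl.
//
//	func main() {
//		pages := crawlPage("https://example.com", "https://example.com", map[string]int{}, 3)
//		for page, count := range pages {
//			fmt.Printf("%d - %s\n", count, page)
//		}
//	}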