crawler.go
package crawler

import (
	"fmt"
	"net/url"

	"github.com/yashmurty/go-web-crawler/core"
)
// Crawl uses fetcher to recursively crawl pages starting with url,
// to a maximum of depth. Links on each page are crawled concurrently.
func Crawl(url, parentURL string, parentURLs []string, depth int, fetcher core.Fetcher,
	store core.Store, done chan bool) {
	// Report to the caller that we're finished.
	if done != nil {
		defer func() { done <- true }()
	}
	if depth <= 0 {
		return
	}

	// Don't fetch the same URL twice: HasCrawled records info for url and
	// reports whether it was already crawled.
	info := &core.URLInfo{
		CrawledStatus: true,
		URL:           url,
		Depth:         depth,
		ParentURL:     parentURL,
		ParentURLs:    parentURLs,
		ChildrenInfo:  make([]*core.URLInfo, 0),
	}
	if store.HasCrawled(url, info) {
		return
	}

	urls, err := fetcher.Fetch(url)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("Depth remaining: %d | Successfully fetched url: %s\n", depth, url)

	// Copy parentURLs before appending so that sibling goroutines, which all
	// receive the same slice, never race on a shared backing array.
	childParents := make([]string, len(parentURLs), len(parentURLs)+1)
	copy(childParents, parentURLs)
	childParents = append(childParents, url)

	// Crawl the children concurrently, then wait for all of them to finish.
	childrenDone := make(chan bool, len(urls))
	for _, u := range urls {
		go Crawl(u, url, childParents, depth-1, fetcher, store, childrenDone)
	}
	for i := 0; i < len(urls); i++ {
		<-childrenDone
	}
}
// ExtractHost parses inputURL and returns its scheme and host. It panics if
// the URL cannot be parsed, lacks an http(s) scheme, or lacks a host.
func ExtractHost(inputURL string) (string, string) {
	u, err := url.Parse(inputURL)
	if err != nil {
		panic("could not parse url. Please enter a valid url.")
	}
	if u.Scheme != "http" && u.Scheme != "https" {
		panic("missing http(s) scheme")
	}
	if u.Host == "" {
		panic("missing host")
	}
	return u.Scheme, u.Host
}
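
// Hypothetical usage sketch, separate from crawler.go above. It assumes,
// based only on the calls made inside Crawl, that core.Fetcher is satisfied
// by a Fetch(url string) ([]string, error) method and core.Store by a
// HasCrawled(url string, info *core.URLInfo) bool method; the stub types,
// link map, and import paths below are invented for illustration and are
// not the repository's own API.
package main

import (
	"fmt"
	"sync"

	"github.com/yashmurty/go-web-crawler/core"
	"github.com/yashmurty/go-web-crawler/crawler"
)

// stubFetcher serves a fixed link map instead of doing real HTTP requests.
type stubFetcher struct {
	links map[string][]string
}

func (f *stubFetcher) Fetch(url string) ([]string, error) {
	return f.links[url], nil
}

// memoryStore records crawled URLs behind a mutex, since Crawl calls
// HasCrawled from concurrent goroutines.
type memoryStore struct {
	mu      sync.Mutex
	crawled map[string]*core.URLInfo
}

// HasCrawled reports whether url was seen before, recording it on first use.
func (s *memoryStore) HasCrawled(url string, info *core.URLInfo) bool {
	s.mu.Lock()
	defer s.mu.Unlock()
	if _, ok := s.crawled[url]; ok {
		return true
	}
	s.crawled[url] = info
	return false
}

func main() {
	start := "https://example.com/"
	scheme, host := crawler.ExtractHost(start)
	fmt.Printf("Crawling %s://%s\n", scheme, host)

	fetcher := &stubFetcher{links: map[string][]string{
		start:                   {"https://example.com/a", "https://example.com/b"},
		"https://example.com/a": {start}, // cycle: the already-crawled guard stops it
	}}
	store := &memoryStore{crawled: make(map[string]*core.URLInfo)}

	// A nil done channel is fine for a synchronous top-level call; Crawl
	// only signals completion when a channel is provided.
	crawler.Crawl(start, "", nil, 3, fetcher, store, nil)
}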