-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathcrawler.go
60 lines (48 loc) · 1.11 KB
/
crawler.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
package internetarchive
import (
	"encoding/json"
	"fmt"
	"net/http"
	"net/url"
	"strings"
)
type CrawlResult struct {
Items []map[string]interface{}
Count float64
Cursor string
Total float64
}
// addCursor returns str with a cursor query parameter set to cursor.
// Any existing cursor parameter (and everything after it) is replaced,
// and the cursor value is query-escaped. An empty cursor returns str
// unchanged.
//
// Fixes two defects in the original: truncating at the bare substring
// "cursor" left the preceding '&' in place, producing "&&cursor=..."
// on every page after the second; and the cursor value was appended
// without escaping, which corrupts the URL when the API returns a
// cursor containing reserved characters.
func addCursor(str string, cursor string) string {
	if cursor == "" {
		return str
	}
	// Drop a previously appended cursor (including its leading '&').
	if i := strings.Index(str, "&cursor="); i != -1 {
		str = str[:i]
	}
	return str + "&cursor=" + url.QueryEscape(cursor)
}
// Crawl pages through the archive.org scrape API for the given collection,
// requesting the listed metadata fields for every item and sending each
// decoded page on stream. The channel is closed after the final page — the
// response that carries no cursor — has been sent.
//
// NOTE(review): errors are funneled through check (defined elsewhere in
// this package), so a network or decode failure does not return normally.
func Crawl(fields []string, collection string, stream chan CrawlResult) {
	fieldList := strings.Join(fields, ",")
	crawlUrl := fmt.Sprintf("http://archive.org/services/search/v1/scrape?fields=%s&q=collection%%3A%s", fieldList, collection)
	for {
		resp, err := http.Get(crawlUrl)
		check(err)
		result := CrawlResult{}
		err = json.NewDecoder(resp.Body).Decode(&result)
		// Close explicitly rather than defer: defer inside the loop would
		// hold every response body open until Crawl returns, leaking
		// connections across the whole pagination run.
		resp.Body.Close()
		check(err)
		// Send every page, including the last: the final response has an
		// empty cursor but still contains items. (The original closed the
		// channel before sending, silently dropping the last page.)
		stream <- result
		if result.Cursor == "" {
			close(stream)
			return
		}
		crawlUrl = addCursor(crawlUrl, result.Cursor)
	}
}