Language: Go
Category: Web Development
Last updated: 2018-07-18 23:34
gistfile1.txt
package main

import (
	"fmt"
	"log"
	"net/http"
	"os"

	"golang.org/x/net/html"
)

// forEachNode calls the functions pre(x) and post(x) for each node x in the
// tree rooted at n. Both functions are optional: pre is called before the
// children are visited (preorder) and post is called after (postorder).
func forEachNode(n *html.Node, pre, post func(n *html.Node)) {
	if pre != nil {
		pre(n)
	}
	for c := n.FirstChild; c != nil; c = c.NextSibling {
		forEachNode(c, pre, post)
	}
	if post != nil {
		post(n)
	}
}

// Extract makes an HTTP GET request to the given URL, parses the response
// as HTML, and returns the links found in the document, resolved against
// the request URL.
func Extract(url string) ([]string, error) {
	resp, err := http.Get(url)
	if err != nil {
		return nil, err
	}
	if resp.StatusCode != http.StatusOK {
		resp.Body.Close()
		return nil, fmt.Errorf("getting %s: %s", url, resp.Status)
	}
	doc, err := html.Parse(resp.Body)
	resp.Body.Close()
	if err != nil {
		return nil, fmt.Errorf("parsing %s as HTML: %v", url, err)
	}
	var links []string
	visitNode := func(n *html.Node) {
		if n.Type == html.ElementNode && n.Data == "a" {
			for _, a := range n.Attr {
				if a.Key != "href" {
					continue
				}
				link, err := resp.Request.URL.Parse(a.Val)
				if err != nil {
					continue // ignore bad URLs
				}
				links = append(links, link.String())
			}
		}
	}
	forEachNode(doc, visitNode, nil)
	return links, nil
}

// crawl fetches one page and returns the links found on it,
// logging (but otherwise ignoring) any error.
func crawl(url string) []string {
	list, err := Extract(url)
	if err != nil {
		log.Print(err)
	}
	return list
}

func main() {
	worklist := make(chan []string) // lists of URLs, possibly with duplicates
	newLinks := make(chan string)   // de-duplicated URLs to crawl

	// Seed the worklist with the command-line arguments.
	go func() {
		worklist <- os.Args[1:]
	}()

	// Start 20 crawler goroutines to fetch pages concurrently.
	for i := 0; i < 20; i++ {
		go func() {
			// Wait for new links sent over by the main goroutine.
			for link := range newLinks {
				foundLinks := crawl(link)
				go func() { worklist <- foundLinks }()
			}
		}()
	}

	// Main loop: de-duplicate the worklist and hand each unseen
	// link to a crawler goroutine via newLinks.
	existLink := make(map[string]bool)
	for list := range worklist {
		for _, link := range list {
			if !existLink[link] {
				fmt.Println(link)
				existLink[link] = true
				// Send this not-yet-processed link to a crawler.
				newLinks <- link
			}
		}
	}
}
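
The program takes one or more seed URLs as command-line arguments and prints each newly discovered URL; since worklist is never closed, it does not terminate on its own and is typically stopped with Ctrl-C. The Extract function can also be exercised without the worker pool. Below is a minimal, hypothetical sketch (not part of the original gist): it assumes Extract from above is available in the same package (i.e. this main would replace the one in the listing, or Extract would be moved into its own package), and the URL is only a placeholder.

package main

import (
	"fmt"
	"log"
)

func main() {
	// Fetch one page and print every link found in it.
	// Extract is the function defined in the gist above.
	links, err := Extract("https://golang.org") // placeholder URL
	if err != nil {
		log.Fatal(err)
	}
	for _, link := range links {
		fmt.Println(link)
	}
}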
