How can I change this sample to use goroutines?


Question

I found a good broken-link checker for web pages, but how do I turn it into a complete example that uses goroutines? The page is: How To Crawl A Website In Golang. The code dynamically appends the URLs to be searched to the pending slice, but I am having some difficulty doing that with goroutines.

package main

import (
    "crypto/tls"
    "fmt"
    "golang.org/x/net/html"
    "io"
    "net/http"
    "net/url"
    "strings"
    "time"
)

var alreadyCrawledList []string
var pending []string
var brokenLinks []string
const localHostWithPort = "localhost:8080"

func IsLinkInPendingQueue(link string) bool {
    for _, x := range pending {
        if x == link {
            return true
        }
    }
    return false
}

func IsLinkAlreadyCrawled(link string) bool {
    for _, x := range alreadyCrawledList {
        if x == link {
            return true
        }
    }
    return false
}

func AddLinkInAlreadyCrawledList(link string) {
    alreadyCrawledList = append(alreadyCrawledList, link)
}

func AddLinkInPendingQueue(link string) {
    pending = append(pending, link)
}

func AddLinkInBrokenLinksQueue(link string) {
    brokenLinks = append(brokenLinks, link)
}

func main() {
    start := time.Now()
    AddLinkInPendingQueue("http://" + localHostWithPort)
    for len(pending) > 0 {
        x := pending[0]
        pending = pending[1:] // dequeue the next URL; crawlPage may enqueue more dynamically
        if err := crawlPage(x); err != nil { // this is the call I would like to run in a goroutine
            fmt.Println(err)
        }
    }
    duration := time.Since(start)
    fmt.Println("________________")
    count := 0
    for _, l := range brokenLinks {
        count++
        fmt.Println(count, "Broken. | ", l)
    }
    fmt.Println("Time taken:", duration)
}

func crawlPage(uri string) error {
    if IsLinkAlreadyCrawled(uri) {
        fmt.Println("Already visited: Ignoring uri | ", uri)
        return nil
    }
    transport := &http.Transport{
        TLSClientConfig: &tls.Config{
            InsecureSkipVerify: true, // accept self-signed certs on the local test server
        },
    }
    client := http.Client{Transport: transport}
    resp, err := client.Get(uri)
    if err != nil {
        fmt.Println("Got error: ", err.Error())
        return err
    }
    defer resp.Body.Close() // close the body on every return path, including broken links
    if resp.StatusCode != http.StatusOK {
        AddLinkInBrokenLinksQueue(uri)
        return fmt.Errorf("got %v instead of 200 for %s", resp.StatusCode, uri)
    }
    links := ParseLinks(resp.Body)
    links = ConvertLinksToLocalHost(links)
    for _, link := range links {
        if !InOurDomain(link) {
            continue
        }
        absolute := FixURL(link, uri)
        if !IsLinkAlreadyCrawled(absolute) && !IsLinkInPendingQueue(absolute) && absolute != uri { // don't enqueue a page twice!
            AddLinkInPendingQueue(absolute)
        }
    }
    AddLinkInAlreadyCrawledList(uri)
    return nil
}

func InOurDomain(link string) bool {
    uri, err := url.Parse(link)
    if err != nil {
        return false
    }
    if uri.Scheme == "http" || uri.Scheme == "https" {
        if uri.Host == localHostWithPort {
            return true
        }
        return false
    }
    return true
}

func ConvertLinksToLocalHost(links []string) []string {
    var convertedLinks []string
    for _, link := range links {
        convertedLinks = append(convertedLinks, strings.Replace(link, "leantricks.com", localHostWithPort, 1))
    }
    return convertedLinks
}

func FixURL(href, base string) string {
    uri, err := url.Parse(href)
    if err != nil {
        return ""
    }
    baseURL, err := url.Parse(base)
    if err != nil {
        return ""
    }
    uri = baseURL.ResolveReference(uri)
    return uri.String()
}

func ParseLinks(httpBody io.Reader) []string {
    var links []string
    page := html.NewTokenizer(httpBody)
    for {
        tokenType := page.Next()
        if tokenType == html.ErrorToken {
            return links
        }

        token := page.Token()
        switch tokenType {
        case html.StartTagToken:
            fallthrough
        case html.SelfClosingTagToken:
            switch token.DataAtom.String() {
            case "a":
                fallthrough
            case "link":
                fallthrough
            case "script":
                for _, attr := range token.Attr {
                    // <a> and <link> carry the URL in href; <script> uses src
                    if attr.Key == "href" || attr.Key == "src" {
                        links = append(links, attr.Val)
                    }
                }
            }
        }
    }
}

Answer 1

Score: 1

You could invoke crawlPage() concurrently and guard the alreadyCrawledList, pending, and brokenLinks variables with mutexes (though this is not very performant). To make it genuinely faster, the code would need substantial restructuring.
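
A minimal sketch of that mutex-based approach, reduced to the shared bookkeeping plus the goroutine fan-out. This is not the answerer's actual sample: fetchLinks and the fakeSite map are hypothetical stand-ins for the client.Get/ParseLinks/FixURL logic in the question.

package main

import (
    "fmt"
    "sync"
)

// crawlState replaces the package-level slices (alreadyCrawledList,
// pending, brokenLinks) with mutex-guarded state that is safe to
// share between goroutines.
type crawlState struct {
    mu      sync.Mutex
    visited map[string]bool
    broken  []string
}

// markVisited records uri and reports whether it was seen for the
// first time, so each page is crawled at most once.
func (s *crawlState) markVisited(uri string) bool {
    s.mu.Lock()
    defer s.mu.Unlock()
    if s.visited[uri] {
        return false
    }
    s.visited[uri] = true
    return true
}

func (s *crawlState) addBroken(uri string) {
    s.mu.Lock()
    defer s.mu.Unlock()
    s.broken = append(s.broken, uri)
}

// fakeSite stands in for the network: each URL maps to the links on
// that page. A real version would do the client.Get + ParseLinks +
// FixURL work from the question instead.
var fakeSite = map[string][]string{
    "http://localhost:8080/":  {"http://localhost:8080/a", "http://localhost:8080/b"},
    "http://localhost:8080/a": {"http://localhost:8080/", "http://localhost:8080/missing"},
    "http://localhost:8080/b": {"http://localhost:8080/a"},
}

func fetchLinks(uri string) ([]string, error) {
    links, ok := fakeSite[uri]
    if !ok {
        return nil, fmt.Errorf("got 404 instead of 200 for %s", uri)
    }
    return links, nil
}

// crawl handles one page and spawns a goroutine per newly discovered
// link; wg.Wait in main blocks until the whole link tree is done.
func crawl(uri string, s *crawlState, wg *sync.WaitGroup) {
    defer wg.Done()
    links, err := fetchLinks(uri)
    if err != nil {
        s.addBroken(uri)
        return
    }
    for _, link := range links {
        if s.markVisited(link) { // enqueue each page at most once
            wg.Add(1) // grow the group before the goroutine starts
            go crawl(link, s, wg)
        }
    }
}

func main() {
    s := &crawlState{visited: make(map[string]bool)}
    start := "http://localhost:8080/"
    s.markVisited(start)

    var wg sync.WaitGroup
    wg.Add(1)
    go crawl(start, s, &wg)
    wg.Wait() // no goroutines remain, so s.broken is safe to read unlocked

    for i, l := range s.broken {
        fmt.Println(i+1, "Broken. | ", l)
    }
}

Calling wg.Add(1) before each go statement, and only after markVisited succeeds, is what guarantees that wg.Wait returns exactly when the last page finishes and that no URL is fetched twice.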

I did a quick check with 4 links and it seems to roughly halve the running time. I wrote sample code with a simple HTTP server, which you can find here.
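
The linked sample itself is not reproduced in the answer, but a minimal local server for exercising the crawler could look something like this (the pages, paths, and the dead link are invented for illustration):

package main

import (
    "fmt"
    "net/http"
)

func main() {
    // Three tiny pages that link to each other, plus a dead link on /a,
    // so a crawler pointed at localhost:8080 has something to report.
    pages := map[string]string{
        "/":  `<a href="/a">a</a> <a href="/b">b</a>`,
        "/a": `<a href="/">home</a> <a href="/missing">dead</a>`,
        "/b": `<a href="/a">a</a>`,
    }
    http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
        body, ok := pages[r.URL.Path]
        if !ok {
            http.NotFound(w, r) // the crawler should list this URL as broken
            return
        }
        fmt.Fprint(w, body)
    })
    fmt.Println(http.ListenAndServe("localhost:8080", nil))
}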

Thanks,

  • Anoop
